def get_rss2(url):
    """Fetch an RSS feed and return a list of RSSItem(title, summary, link).

    First tries the .NET SyndicationFeed parser; when the feed is not
    acceptable to the syndication reader (XmlException), falls back to
    downloading the raw document and scraping item fields via XPath.
    """
    try:
        with XmlReader.Create(url) as reader:
            return [
                RSSItem(
                    i.Title.Text,
                    i.Summary.Text,
                    i.Links[0].Uri.AbsoluteUri if i.Links.Count > 0 else "")
                for i in SyndicationFeed.Load(reader).Items
            ]
    except XmlException:
        # Fallback: scrape the feed manually.  Dispose the WebClient
        # deterministically instead of leaking it (it is IDisposable).
        with WebClient() as wc:
            wc.Encoding = UTF8
            xmlstr = wc.DownloadString(url)
        xdoc = XmlDocument()
        xdoc.LoadXml(xmlstr)
        xelem = xdoc.DocumentElement
        # Strip embedded newlines from titles; links/descriptions kept as-is.
        titles = [i.InnerText.Replace("\n", "").Replace("\r", "")
                  for i in xelem.SelectNodes("//item//title")]
        links = [i.InnerText for i in xelem.SelectNodes("//item//link")]
        descriptions = [i.InnerText
                        for i in xelem.SelectNodes("//item//description")]
        # RSSItem takes (title, description, link), matching the try-branch.
        return [RSSItem(t, d, l)
                for t, d, l in zip(titles, descriptions, links)]
def get_rss2(url):
    """Return the items of the RSS feed at *url* as RSSItem objects.

    Primary path: .NET SyndicationFeed.  On XmlException (malformed feed),
    fall back to raw XPath scraping of <item> title/link/description nodes.
    """
    try:
        with XmlReader.Create(url) as reader:
            feed = SyndicationFeed.Load(reader)
            return [
                RSSItem(
                    item.Title.Text,
                    item.Summary.Text,
                    item.Links[0].Uri.AbsoluteUri if item.Links.Count > 0 else "")
                for item in feed.Items
            ]
    except XmlException:
        # Manual fallback.  The WebClient is IDisposable: dispose it via
        # `with` instead of leaking it as the original code did.
        with WebClient() as wc:
            wc.Encoding = UTF8
            xmlstr = wc.DownloadString(url)
        xdoc = XmlDocument()
        xdoc.LoadXml(xmlstr)
        root = xdoc.DocumentElement
        # Titles get newlines removed; the other fields are taken verbatim.
        titles = [n.InnerText.Replace("\n", "").Replace("\r", "")
                  for n in root.SelectNodes("//item//title")]
        links = [n.InnerText for n in root.SelectNodes("//item//link")]
        descriptions = [n.InnerText
                        for n in root.SelectNodes("//item//description")]
        return [RSSItem(t, d, l)
                for t, d, l in zip(titles, descriptions, links)]
def __init__(self, canvas):
    """Attach to *canvas*, seed the RNG, draw once, then start an async
    download of the star XAML template (completion -> self.xamlDownloaded)."""
    self.canvas = canvas
    self.findRootVisual()
    self.rand = Random()
    self.RedrawScreen(0)
    downloader = WebClient()
    downloader.DownloadStringCompleted += self.xamlDownloaded
    downloader.DownloadStringAsync(Uri('star.xaml', UriKind.Relative))
def _get_htmldoc(url, encode=UTF8):
    """Download the HTML at *url* and return it parsed as an HtmlDocument.

    Returns None when the request fails with WebException.
    `encode` is the System.Text.Encoding used to decode the response.
    """
    # Dispose the WebClient deterministically (it is IDisposable).
    with WebClient() as wc:
        wc.Encoding = encode
        try:
            # Only the network call can raise WebException; keep the try minimal.
            html = wc.DownloadString(url)
        except WebException:
            return None
    htmlDoc = HtmlDocument()
    htmlDoc.LoadHtml(html)
    return htmlDoc
def pastebin(title, text): client = WebClient() client.Headers["User-Agent"] = PASTEBIN_AGENT params = NameValueCollection() params["data"] = text params["language"] = "text" params["name"] = PASTEBIN_AUTHOR params["title"] = title params["private"] = PASTEBIN_PRIVATE params["expire"] = PASTEBIN_EXPIRE pg = client.UploadValues("http://pastebin.kde.org/api/json/create", "POST", params) g = ASCII.GetString(pg) print g result = json.loads(g)["result"] return "http://pastebin.kde.org/%s/%s" % (result["id"], result["hash"])
def server(*urlcomponents): url = "/%s/" % serverurl + "/".join(urlcomponents) result = [False, None, None] def callback(s, e): result[0] = True result[1] = e.Error if not result[1]: result[2] = e.Result webclient = WebClient() webclient.DownloadStringCompleted += callback webclient.DownloadStringAsync(Uri(url, UriKind.Relative)) while not result[0]: sleep(0.5) if result[1]: raise IOError, "Problem accessing web server\n%s" % result[1] return result[2]
def _GetExecutor(self, contentType, executionType, methodType, acceptType=None):
    """Build a WebClient pre-loaded with auth/CSRF headers and return the
    matching transfer method.

    executionType is "Sync" or "Async"; methodType is "Download" or
    "Upload".  Unknown keys raise KeyError, same as a plain dict lookup.
    """
    client = WebClient()
    client.Headers.Add("Authorization", "Basic " + self.odataCredentials)
    client.Headers.Add("x-csrf-token", self.csrf.token)
    client.Headers.Add("Cookie", ";".join(self.csrf.cookies))
    client.Headers.Add("Content-Type", contentType)
    if acceptType is not None:
        client.Headers.Add("Accept", acceptType)
    dispatch = {
        "Sync": {
            "Download": client.DownloadString,
            "Upload": client.UploadString,
        },
        "Async": {
            "Download": client.DownloadStringTaskAsync,
            "Upload": client.UploadStringTaskAsync,
        },
    }
    return dispatch[executionType][methodType]
def download_tts(file_path, text, settings):
    """Fetch Google Translate TTS audio for *text* and save it to *file_path*.

    The language comes from settings["lang"].  Referer/User-Agent headers
    mimic a mobile client, as required by this endpoint.
    """
    encoded = HttpUtility.UrlEncode(text)
    url = ("https://translate.google.com/translate_tts"
           "?ie=UTF-8&tl={1}&client=tw-ob&q={0}").format(encoded,
                                                         settings["lang"])
    with WebClient() as wc:
        wc.Headers["Referer"] = "http://translate.google.com/"
        wc.Headers["User-Agent"] = "stagefright/1.2 (Linux;Android 5.0)"
        wc.DownloadFile(url, file_path)
def get_prj_text_from_EPSG(EPSG):
    """Get the WKT projection string for an EPSG code from epsg.io (.Net).

    Raises Exception with a manual-workaround hint when the request fails.
    """
    path = "https://epsg.io/" + str(EPSG) + ".wkt"
    try:
        # Dispose the WebClient (was leaked); catch Exception instead of a
        # bare except so SystemExit/KeyboardInterrupt are not swallowed.
        with WebClient() as web_client:
            web_client.Headers.Add(
                "user-agent",
                "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; .NET CLR 1.0.3705;)"
            )
            prj_text = web_client.DownloadString(path)
    except Exception:
        # Fixed hint URL: the request above uses epsg.io/<code>.wkt, the old
        # message pointed at epsg.io/?<code>.wkt which is not the same page.
        raise Exception(
            "Query failed. Please, copy and paste https://epsg.io/{0}.wkt on your browser, check if text is showed. If yes, use it with gh."
            .format(EPSG))
    return prj_text
def _getCsrf(self):
    '''Fetch a fresh CSRF token (and session cookies) from C4C.'''
    Log.Write("reloading csrf token")
    client = WebClient()
    client.Headers.Add("Authorization", "Basic " + self.odataCredentials)
    # "fetch" asks the server to mint a new token, returned in the
    # response headers of the $metadata request below.
    client.Headers.Add("x-csrf-token", "fetch")
    client.Headers.Add("Content-Type", "application/json")
    metadata_url = ("https://my{0}.crm.ondemand.com/sap/c4c/odata/v1/"
                    "c4codataapi/$metadata").format(self.sapId)
    # Response body is unused; the request is made only for its headers.
    client.DownloadString(metadata_url)
    csrf = Objects.Dynamic()
    csrf.token = client.ResponseHeaders["x-csrf-token"]
    csrf.cookies = client.ResponseHeaders["set-cookie"].split(",")
    return csrf
def ping(url):
    """Best-effort GET of *url*: logs the URL on success, the error on failure."""
    import logging
    try:
        import System
        from System.Net import CredentialCache
        from System.Net import WebClient
        client = WebClient()
        # Use the current process/user credentials (e.g. NTLM behind a proxy).
        client.Credentials = CredentialCache.DefaultCredentials
        res = client.DownloadString(url)  # body discarded; reachability check only
        logging.info(url)
    except SystemError, ex:
        # NOTE(review): catching Python's SystemError here looks suspicious --
        # confirm that .NET network failures (WebException) actually surface
        # as SystemError in this IronPython host; otherwise they propagate.
        logging.error(ex)
def process(self):
    """Scrape the Twitter search thread page for self.status.Id and emit
    each reply through self.notice(text, nick=name)."""
    # `empty` tracks "no replies found / not crawled"; presumably consumed
    # by code after this span -- TODO confirm (unused in the visible block).
    empty = False
    try:
        client = WebClient()
        client.Encoding = Encoding.UTF8
        client.Headers['Accept'] = 'text/html'
        client.Headers[
            'User-Agent'] = 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)'
        body = client.DownloadString(
            'http://search.twitter.com/search/thread/%d' % self.status.Id)
        # Each reply sits inside a <div class="msg"> block.
        divs = re.findall(r'<div class="msg">(.*?)</div>', body, re.S)
        if divs:
            for div in divs:
                # First <a> holds the screen name; the <span> the message HTML.
                match = re.search(
                    r'<a[^>]*>(.*?)</a>.*<span[^>]*>(.*?)</span>', div)
                name = match.group(1)
                text = re.sub(r'<[^>]*>', '', match.group(2))  # strip tags
                self.notice(text, nick=name)
        else:
            empty = True
    except WebException, e:
        if e.Response.StatusCode == 404:
            # Not crawled yet, or the account is protected.
            empty = True
        else:
            raise
def image_from_remote(source):
    """Construct an image from a remote source.

    Parameters
    ----------
    source : str
        The url of the remote source.

    Returns
    -------
    Eto.Drawing.Bitmap
        Representation of an image in memory.

    Examples
    --------
    .. code-block:: python

        image = image_from_remote('http://block.arch.ethz.ch/brg/images/cache/dsc02360_ni-2_cropped_1528706473_624x351.jpg')

    """
    # Dispose the WebClient deterministically (was leaked).  The
    # MemoryStream must remain open while the Bitmap is built from it.
    with WebClient() as w:
        d = w.DownloadData(source)
    m = MemoryStream(d)
    return Eto.Drawing.Bitmap(m)
def process(self):
    """Fetch the Twitter search thread page for self.status.Id and pass each
    scraped reply to self.notice(text, nick=name)."""
    # Set when the thread has no replies or was never crawled; not read in
    # this span -- presumably used later in the method/class. TODO confirm.
    empty = False
    try:
        client = WebClient()
        client.Encoding = Encoding.UTF8
        client.Headers['Accept'] = 'text/html'
        client.Headers['User-Agent'] = 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)'
        body = client.DownloadString('http://search.twitter.com/search/thread/%d' % self.status.Id)
        # One <div class="msg"> per reply.
        divs = re.findall(r'<div class="msg">(.*?)</div>', body, re.S)
        if divs:
            for div in divs:
                # Group 1: screen name from the <a>; group 2: message HTML.
                match = re.search(r'<a[^>]*>(.*?)</a>.*<span[^>]*>(.*?)</span>', div)
                name = match.group(1)
                text = re.sub(r'<[^>]*>', '', match.group(2))  # drop HTML tags
                self.notice(text, nick=name)
        else:
            empty = True
    except WebException, e:
        if e.Response.StatusCode == 404:
            # Not crawled yet, or the account is protected.
            empty = True
        else:
            raise
def GetBytes(url):
    """Download the file at *url* and return its raw bytes."""
    client = WebClient()
    return client.DownloadData(url)
def ShortenUrl(long_url):
    """Shorten *long_url* via the bit.ly v2 XML API and return the short URL."""
    request_url = (
        "http://api.bit.ly/shorten?version=2.0.1&format=xml&longUrl=%s&login=%s&apiKey=%s"
        % (Utility.UrlEncode(long_url), username, apikey))
    body = WebClient().DownloadString(request_url)
    # Module-level re_shorten captures the shortened URL in group 1.
    return re_shorten.search(body).group(1)
""" silverwebel - A Cinfony module for Silverlight that runs on web services Global variables: informats - a dictionary of supported input formats outformats - a dictionary of supported output formats fps - a list of supported fingerprint types """ import re from time import sleep # .NET classes from System.Net import WebClient from System import Uri, UriKind _webclient = WebClient() tk = None informats = { "smi": "SMILES", "inchikey": "InChIKey", "inchi": "InChI", "name": "Common name" } """A dictionary of supported input formats""" outformats = { "smi": "SMILES", "cdxml": "ChemDraw XML", "inchi": "InChI", "sdf": "Symyx SDF",
def GetBytes(self, url):
    """Return the raw bytes downloaded from *url*."""
    data = WebClient().DownloadData(url)
    return data
## Bring in STDLIB >>> import sys >>> sys.path.append("C:\IronPython-2.7.7\LibR.docx") >>> import os ## Launching external process from System.Diagnostics import Process Process.Start('powershell.exe', '') ## Download of file over TLS from System.Net import WebClient wc = WebClient() from System.Net import ServicePointManager, SecurityProtocolType ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12 wc.DownloadFile("https://github.com/gentilkiwi/mimikatz/releases/download/2.1.1-20180322/mimikatz_trunk.zip", "mimikatz.zip") import clr from System.Net import WebClient from System.Net import ServicePointManager, SecurityProtocolType with WebClient() as wc: ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12 wc.DownloadFile("https://github.com/gentilkiwi/mimikatz/releases/download/2.1.1-20180322/mimikatz_trunk.zip", "mimikatz.zip") wc.DownloadData("url") from System.IO import MemoryStream clr.AddReference("System.IO.Compression")
category_list = [] # Populating Material List and Downloading the images: for item in fabric_df: image_approval = item["fields"]["Approval"] material_name = item["fields"]["Fabric_Code_Ref"] material_category = item["fields"]["Material_Type"] if image_approval != "Rejected": material_list.append(material_name) category_list.append(material_category) print("Downloading: " + material_name) try: image_url = item["fields"]["Fabric_Image"][0]["url"] image_ext = image_url.split("/")[-1].split(".")[-1] map_path.append(folder_path + "\\" + material_name + "." + image_ext) wc = WebClient() wc.DownloadFile( image_url, folder_path + "\\" + material_name + "." + image_ext ) except KeyError: print("-" * 10 + material_name + " has no image") map_path.append(None) else: print("-" * 5 + image_approval + "-" * 5 + material_name) # Fetching Materials with db.Transaction("Creating Bulk Materials"): for i, item in enumerate(material_list): mat = basefabric.Duplicate(item) mat.AppearanceAssetId = basefabricA_asset.Duplicate(item).Id fabric_texture_path = map_path[i]
def GetWebRequest(self, address):
    """Override of WebClient.GetWebRequest that attaches this client's
    cookie container (when one is set) to every outgoing request."""
    request = WebClient.GetWebRequest(self, address)
    if request is None or self._Cookies is None:
        return request
    request.CookieContainer = self._Cookies
    return request
# NOTE(review): several names used below (clr, MemoryStream, StreamWriter,
# SeekOrigin) must be imported earlier in the file -- confirm.
from System import DateTime
from System import DateTime, TimeSpan, DayOfWeek  # re-imports DateTime; harmless
from datetime import date
from System.Net import HttpWebRequest
import time
from Spotfire.Dxp.Data.Import import TextFileDataSource, TextDataReaderSettings
clr.AddReference('System.Web.Extensions')
from System.Web.Script.Serialization import JavaScriptSerializer
from System.Net import WebClient
# Create a web client
client = WebClient()
# Download the results of that URL
results = client.DownloadString("http://localhost:8888/spotfireFramework/assetMarketPlace/customLibs/visTimeline/dxp/eventsGroup.csv")
# print these results
print results
# Copy the downloaded CSV text into an in-memory stream, rewound to the
# start so a downstream reader can consume it from the beginning.
stream = MemoryStream()
writer = StreamWriter(stream)
writer.Write(results)
writer.Flush()
stream.Seek(0, SeekOrigin.Begin)
# This example shows how a remote DLL can be loaded. Also, how DynamicDLL can be loaded by Typhoon # import clr clr.AddReference("System") clr.AddReference("System.IO.Compression") from System.Net import WebClient from System.Net import ServicePointManager, SecurityProtocolType ## We could downlaod a zip and unwrap it: # from System.IO import Stream, StreamReader, MemoryStream, BinaryWriter, FileStream, File # from System.IO.Compression import ZipArchive, ZipArchiveMode ## Download a dll to memory from the remote endpoint, load it up in memroy with WebClient() as wc: ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12 dll = wc.DownloadData("http://127.0.0.1:8000/tmpB1B9.dll") ## This is how we get .Net and Python typres from an object print(".Net Type: ", clr.GetClrType(type(dll)) ) print("Python Type: ", type(dll) ) from System.Reflection import Assembly # Byte loading assembly clr.AddReference(Assembly.Load(dll)) from Typhoon.Extensions import ClipboardManager cm = ClipboardManager() cm.RunCode()
def web_put(url, data, token):
    """HTTP PUT *data* to *url* with Basic-auth *token*; return the response body.

    Forces TLS 1.2 process-wide before issuing the request.
    """
    System.Net.ServicePointManager.SecurityProtocol = System.Net.SecurityProtocolType.Tls12
    # Dispose the WebClient when done (it was leaked before).
    with WebClient() as client:
        client.Headers.Add('User-Agent', 'Iron Python')
        client.Headers.Add('Authorization', 'Basic ' + token)
        return client.UploadString(url, 'PUT', data)
def ShortenUrl(long_url):
    """Return the shortened form of *long_url* from bit.ly."""
    request_url = "http://bit.ly/?url=%s" % (Utility.UrlEncode(long_url))
    body = WebClient().DownloadString(request_url)
    # Module-level re_shorten captures the short URL in group 1.
    return re_shorten.search(body).group(1)