def install_via_tarballs():
    """Download the nightly rustc tarball for the build host (plus the
    standard library for one optional extra target) and install it into
    the local ``rustc`` directory.

    Relies on module-level ``host``, ``extra`` and ``url`` plus the
    project ``download`` helper module.
    """
    # Start from a clean staging directory.
    if os.path.isdir("rustc-install"):
        shutil.rmtree("rustc-install")

    # Fetch and unpack the compiler for the build host.
    host_fname = 'rustc-nightly-' + host + '.tar.gz'
    download.get(url + '/' + host_fname, host_fname)
    download.unpack(host_fname, "rustc-install", quiet=True)
    os.remove(host_fname)

    # Optionally graft in the standard library for one extra target.
    if extra is not None:  # was `extra != None`; identity check is the idiom
        extra_fname = 'rustc-nightly-' + extra + '.tar.gz'
        print("adding target libs for " + extra)
        download.get(url + '/' + extra_fname, extra_fname)
        folder = extra_fname.replace(".tar.gz", "")
        # Keep the manifest open only while appending (the original leaked
        # the handle by never closing it).
        with open("rustc-install/rustc/manifest.in", "a") as manifest, \
                contextlib.closing(tarfile.open(extra_fname)) as tar:
            for p in tar.getnames():
                # Only members under the extra target's rustlib directory.
                if not "rustc/lib/rustlib/" + extra in p:
                    continue
                name = p.replace(folder + "/", "", 1)
                dst = "rustc-install/" + name
                # extract() returns None; the original bound it to an unused `f`.
                tar.extract(p, "rustc-install")
                tp = os.path.join("rustc-install", p)
                if os.path.isdir(tp) and os.path.exists(dst):
                    continue
                shutil.move(tp, dst)
                # Record plain files in the install manifest.
                if not os.path.isdir(dst):
                    manifest.write(p.replace(folder + "/rustc/", "file:") + "\n")
        shutil.rmtree("rustc-install/" + folder)
        os.remove(extra_fname)

    # Swap the freshly staged compiler into place.
    if os.path.isdir("rustc"):
        shutil.rmtree("rustc")
    os.rename("rustc-install/rustc", "rustc")
    shutil.rmtree("rustc-install")
def install_via_tarballs():
    """Download the nightly rustc tarball for the build host (plus target
    libraries for one optional extra target) and install it into the
    local ``rustc`` directory.

    Relies on module-level ``host``, ``extra``, ``url`` and ``libdir``.
    """
    # Start from a clean staging directory.
    if os.path.isdir("rustc-install"):
        shutil.rmtree("rustc-install")

    # Fetch and unpack the compiler for the build host.
    host_fname = 'rustc-nightly-' + host + '.tar.gz'
    download.get(url + '/' + host_fname, host_fname)
    download.unpack(host_fname, "rustc-install", quiet=True)
    os.remove(host_fname)

    # Optionally graft in the standard library for one extra target.
    if extra is not None:  # was `extra != None`; identity check is the idiom
        extra_fname = 'rustc-nightly-' + extra + '.tar.gz'
        print("adding target libs for " + extra)
        download.get(url + '/' + extra_fname, extra_fname)
        folder = extra_fname.replace(".tar.gz", "")
        with contextlib.closing(tarfile.open(extra_fname)) as tar:
            for p in tar.getnames():
                # Only members under the extra target's rustlib directory.
                if not "rustc/" + libdir + "/rustlib/" + extra in p:
                    continue
                name = p.replace(folder + "/", "", 1)
                dst = "rustc-install/" + name
                tar.extract(p, "rustc-install")
                tp = os.path.join("rustc-install", p)
                if os.path.isdir(tp) and os.path.exists(dst):
                    continue
                shutil.move(tp, dst)
        shutil.rmtree("rustc-install/" + folder)
        os.remove(extra_fname)

    # Swap the freshly staged compiler into place.
    if os.path.isdir("rustc"):
        shutil.rmtree("rustc")
    os.rename("rustc-install/rustc", "rustc")
    shutil.rmtree("rustc-install")
def download_button(self):
    """Download the app currently selected in the list widget.

    Reads the destination path and the extract flag from the UI, then
    drives the progress bar around the download call.
    """
    selected = self.ui.listAppsWidget.currentItem()
    self.app_name = selected.text()
    target_path = self.ui.FileNameLineEdit.text()
    should_extract = self.ui.ExtractAppCheckbox.isChecked()
    self.ui.progressBar.setValue(25)
    download.get(app_name=self.app_name,
                 output=target_path,
                 extract=should_extract)
    self.ui.progressBar.setValue(100)
def do_download(self, key, valuedict, savedir='/mnt/hgfs/share/stars'):
    """Save one entry's pictures, relations and productions under
    <savedir>/<key>/.

    key: name used as the directory name (no-op when falsy).
    valuedict: dict with 'pictures' (list of URLs) plus 'reletion' and
        'productions' dicts consumed by download.get_dict (no-op when falsy).
    savedir: root directory for all downloads.
    """
    # Guard clauses: nothing to do without a key or data.
    if not key:
        return
    if not valuedict:
        return
    dirname = os.path.join(savedir, key)
    makedir(dirname)
    dirpictures = os.path.join(dirname, 'pictures')
    makedir(dirpictures)
    piclist = valuedict['pictures']
    i = 0
    for pic in piclist:
        i = i + 1
        # File extension is forced to .jpg regardless of the source URL.
        name = key + str(i) + '.jpg'
        pathname = os.path.join(dirpictures, name)
        try:
            download.get(pic, pathname)
        except Exception as e:
            # A failed download gives its index back to the next picture.
            i = i - 1
            print(e)  # was py2 `print e`; parenthesized form works on 2 and 3
    dirrelation = os.path.join(dirname, 'relation')
    # NOTE(review): 'reletion' is a (misspelled) key in the incoming data;
    # do not "fix" the spelling here without changing the producer too.
    relationdict = valuedict['reletion']
    download.get_dict(relationdict, dirrelation)
    dirproductions = os.path.join(dirname, 'productions')
    producpicdict = valuedict['productions']
    download.get_dict(producpicdict, dirproductions)
def getSimilarityValue(url1, url2):
    """Fetch both URLs, parse them with html5lib, and return their
    DOM similarity score."""
    dom1 = BeautifulSoup(get(url1).text, "html5lib")
    dom2 = BeautifulSoup(get(url2).text, "html5lib")
    return getSimilarity(dom1, dom2)
def download_button(self):
    """Download the selected app, capturing the downloader's stdout so it
    can be shown in the status bar."""
    self.app_name = self.ui.listAppsWidget.currentItem().text()
    self.status_message("Downloading " + self.app_name + " from Open Shop Channel..")
    destination = self.ui.FileNameLineEdit.text()
    wants_extract = self.ui.ExtractAppCheckbox.isChecked()
    self.ui.progressBar.setValue(25)
    captured = io.StringIO()
    # Everything the downloader prints goes into `captured`.
    with redirect_stdout(captured):
        download.get(app_name=self.app_name,
                     output=destination,
                     extract=wants_extract)
    self.ui.progressBar.setValue(100)
    self.status_message(escape_ansi(captured.getvalue()))
def run_download(self):
    '''
    Download the <source_url> to the <download_dir>.
    '''
    src = self.vars['source_url']
    dest = self.vars['download_dir']
    # `download_final` is optional; .get() yields None when absent.
    final = self.vars.get('download_final')
    with cd(self.get_rundir('download')):
        return download.get(src, dest, final)
def install_via_tarballs():
    """Install the nightly compiler plus the standard library for every
    requested target into the local ``rustc`` directory."""
    staging = "rustc-install"
    if os.path.isdir(staging):
        shutil.rmtree(staging)

    # Download the compiler
    host_tarball = 'rustc-nightly-' + host + '.tar.gz'
    download.get(url + '/' + host_tarball, host_tarball)
    download.unpack(host_tarball, staging, quiet=True, strip=2)
    os.remove(host_tarball)

    # Download all target libraries needed
    for target in targets:
        fetch_std(target)

    # Replace any previous install with the staged one.
    if os.path.isdir("rustc"):
        shutil.rmtree("rustc")
    os.rename(staging, "rustc")
def download_button(self):
    """Prompt for a save location, then download the selected app while
    capturing downloader output for the status bar."""
    selection = self.ui.listAppsWidget.currentItem().data(Qt.UserRole)
    self.app_name = selection[0]
    self.status_message(
        f"Downloading {self.app_name} from Open Shop Channel..")
    path_to_file, _ = QFileDialog.getSaveFileName(
        None, 'Save Application', self.ui.FileNameLineEdit.text())
    output = path_to_file
    self.ui.progressBar.setValue(25)
    console_output = io.StringIO()
    if output == '':
        # Empty path means the user dismissed the save dialog.
        self.ui.progressBar.setValue(0)
        self.status_message("Cancelled Download.")
    else:
        with redirect_stdout(console_output):
            download.get(app_name=self.app_name, repo=HOST, output=output)
        self.ui.progressBar.setValue(100)
        self.status_message(utils.escape_ansi(console_output.getvalue()))
def run(url):
    """Download the resource at *url* and classify it.

    :param url: location of the file to fetch
    :return: classification object produced by caffenet
    """
    local_path = download.get(url)
    return caffenet.classify(local_path)
def dl_list(file, display="False", repo="hbb1.oscwii.org"):
    """Process a newline-separated list of app names from *file*.

    display: when exactly True, only print each name; otherwise query the
        repo and download each app.
        NOTE(review): the default is the *string* "False", not the boolean —
        kept as-is for interface compatibility; only a literal True changes
        the behavior, so the default still takes the download path.
    repo: repository hostname passed through to metadata/download.
    """
    with open(file) as listing:  # the original never closed this handle
        for line in listing:
            # Strip the trailing newline, whichever platform wrote the file.
            # (str.rstrip never raises for str input, so the original's two
            # try/except wrappers were dead code.)
            line = line.rstrip("\n\r")
            if line == "":  # was `line is ""`: identity test on a literal is unreliable
                continue
            if display is True:
                print(line)
            else:
                if parsecontents.query(term=line, repo=repo) is True:
                    metadata.get(app_name=line, type="default", repo=repo)
                    download.get(app_name=line, repo=repo)
# (continuation of an `add_argument` call opened before this chunk)
                  help="Repository name (e.g. primary)", default="primary")
# Show arguments
show.add_argument("app", type=str, help="App to show. (e.g. WiiVNC)")
show.add_argument("-r", "--host", type=str,
                  help="Repository name (e.g. primary)", default="primary")

args = parser.parse_args()

# Get
if args.cmd == "get":
    # Resolve the repo's hostname from its short name and download the app.
    download.get(app_name=args.app, repo=repos.name(args.host)["host"])

# Send
if args.cmd == "send":
    # get hostname of host
    host_url = repos.name(args.host)["host"]
    # Validate the target IP before building the request.
    ok = wiiload.validate_ip_regex(ip=args.destination)
    if not ok:
        print(
            f"Error: The address '{args.destination}' is invalid! Please correct it!"
        )
        exit(1)
    url = f"https://{host_url}/hbb/{args.app}/{args.app}.zip"
    r = requests.get(url)
import os  # was missing: os.path.isdir/os.makedirs are used below
import subprocess
import sys
import hashlib

import download

# Date of the nightly to fetch, e.g. "2015-01-01" (first CLI argument).
date = sys.argv[1]
print(date)

if not os.path.isdir('target/dl'):
    os.makedirs('target/dl')

# platform label -> cargo target triple
snaps = {
    'macos-i386': 'i686-apple-darwin',
    'macos-x86_64': 'x86_64-apple-darwin',
    'linux-i386': 'i686-unknown-linux-gnu',
    'linux-x86_64': 'x86_64-unknown-linux-gnu',
    'winnt-i386': 'i686-pc-windows-gnu',
    'winnt-x86_64': 'x86_64-pc-windows-gnu',
    'bitrig-x86_64': 'x86_64-unknown-bitrig',
}

for platform in sorted(snaps):
    triple = snaps[platform]
    tarball = 'cargo-nightly-' + triple + '.tar.gz'
    url = 'https://static.rust-lang.org/cargo-dist/' + date + '/' + tarball
    dl_path = "target/dl/" + tarball
    download.get(url, dl_path, quiet=True)
    # Hash the download so the caller can pin it (the original leaked
    # the file handle here).
    with open(dl_path, 'rb') as f:
        h = hashlib.sha1(f.read()).hexdigest()
    print(' ' + platform + ' ' + h)
def getPages(links):
    """Fetch every URL in *links* and return a {url: response} dict."""
    pages = {}
    for link in links:
        pages[link] = get(link)
        # Progress marker: one line per fetched URL.
        print(link)  # was py2 `print link`; this form works on 2 and 3
    return pages
def fetch_std(target):
    """Download and unpack the nightly std-library tarball for *target*
    into the rustc-install staging directory, then delete the tarball."""
    tarball = 'rust-std-nightly-' + target + '.tar.gz'
    print("adding target libs for " + target)
    download.get(url + '/' + tarball, tarball)
    download.unpack(tarball, "rustc-install", quiet=True, strip=2)
    os.remove(tarball)
def run(): ''' Start a console version of this application. ''' # Command line parser options. usage = "usage: %prog [-c|-d|-j|--convert|--rconvert] [options] arg1 arg2 ..." parser = optparse.OptionParser(version=checker.ABOUT, usage=usage) parser.add_option("--download", "-d", action="store_true", dest="download", help=_("Actually download the file(s) in the metalink")) parser.add_option("--check", "-c", action="store_true", dest="check", help=_("Check the metalink file URLs")) #parser.add_option("--file", "-f", dest="filevar", metavar="FILE", help=_("Metalink file to check or file to download")) parser.add_option("--timeout", "-t", dest="timeout", metavar="TIMEOUT", help=_("Set timeout in seconds to wait for response (default=10)")) parser.add_option("--os", "-o", dest="os", metavar="OS", help=_("Operating System preference")) parser.add_option("--no-segmented", "-s", action="store_true", dest="nosegmented", help=_("Do not use the segmented download method")) parser.add_option("--lang", "-l", dest="language", metavar="LANG", help=_("Language preference (ISO-639/3166)")) parser.add_option("--country", dest="country", metavar="LOC", help=_("Two letter country preference (ISO 3166-1 alpha-2)")) parser.add_option("--pgp-keys", "-k", dest="pgpdir", metavar="DIR", help=_("Directory with the PGP keys that you trust (default: working directory)")) parser.add_option("--pgp-store", "-p", dest="pgpstore", metavar="FILE", help=_("File with the PGP keys that you trust (default: ~/.gnupg/pubring.gpg)")) parser.add_option("--gpg-binary", "-g", dest="gpg", help=_("(optional) Location of gpg binary path if not in the default search path")) parser.add_option("--convert-jigdo", "-j", action="store_true", dest="jigdo", help=_("Convert Jigdo format file to Metalink")) parser.add_option("--port", dest="port", help=_("Streaming server port to use (default: No streaming server)")) parser.add_option("--html", dest="html", help=_("Extract links from HTML webpage")) 
parser.add_option("--convert", dest="convert", action="store_true", help="Conversion from 3 to 4 (IETF RFC)") parser.add_option("--rconvert", dest="rev", action="store_true", help="Reverses conversion from 4 (IETF RFC) to 3") parser.add_option("--output", dest="output", metavar="OUTFILE", help=_("Output conversion result to this file instead of screen")) parser.add_option("--rss", "-r", action="store_true", dest="rss", help=_("RSS/Atom Feed Mode, implies -d")) parser.add_option("--testable", action="store_true", dest="only_testable", help=_("Limit tests to only the URL types we can test (HTTP/HTTPS/FTP)")) parser.add_option("-w", dest="writedir", default=os.getcwd(), help=_("Directory to write output files to (default: current directory)")) (options, args) = parser.parse_args() #if options.filevar != None: # args.append(options.filevar) if len(args) == 0: parser.print_help() return socket.setdefaulttimeout(10) proxy.set_proxies() if options.os != None: download.OS = options.os if options.language != None: download.LANG = [].extend(options.language.lower().split(",")) if options.country != None: download.COUNTRY = options.country if options.pgpdir != None: download.PGP_KEY_DIR = options.pgpdir if options.pgpstore != None: download.PGP_KEY_STORE = options.pgpstore if options.port != None: download.PORT = int(options.port) if options.gpg != None: GPG.DEFAULT_PATH.insert(0, options.gpg) if options.timeout != None: socket.setdefaulttimeout(int(options.timeout)) if options.country != None and len(options.country) != 2: print _("Invalid country length, must be 2 letter code") return if options.jigdo: print download.convert_jigdo(args[0]) return if options.convert: text = download.parse_metalink(args[0], ver=4).generate() if options.output: handle = open(options.output, "w") handle.write(text) handle.close() else: print text return if options.rev: text = download.parse_metalink(args[0], ver=3).generate() if options.output: handle = open(options.output, "w") 
handle.write(text) handle.close() else: print text return if options.html: handle = download.urlopen(options.html) text = handle.read() handle.close() page = checker.Webpage() page.set_url(options.html) page.feed(text) for item in page.urls: if item.endswith(".metalink"): print "=" * 79 print item mcheck = checker.Checker() mcheck.check_metalink(item) results = mcheck.get_results() print_totals(results) return if options.check: failure = False for item in args: print "=" * 79 print item mcheck = checker.Checker(options.only_testable) mcheck.check_metalink(item) results = mcheck.get_results() result = print_totals(results) failure |= result sys.exit(int(failure)) if options.download: for item in args: progress = ProgressBar() result = download.get(item, options.writedir, handlers={"status": progress.download_update, "bitrate": progress.set_bitrate, "time": progress.set_time}, segmented = not options.nosegmented) progress.download_end() if not result: sys.exit(-1) if options.rss: for feed in args: progress = ProgressBar() result = download.download_rss(feed, options.writedir, handlers={"status": progress.download_update, "bitrate": progress.set_bitrate, "time": progress.set_time}, segmented = not options.nosegmented) progress.download_end() if not result: sys.exit(-1) sys.exit(0)
# Show arguments
show.add_argument("app", type=str, help="App to show. (e.g. WiiVNC)")
show.add_argument("-r", "--host", type=str,
                  help="Repository name (e.g. primary)", default="primary")

args = parser.parse_args()

# Get
if args.cmd == "get":
    # Resolve the repository hostname once instead of once per package
    # (the original re-ran repos.name() inside the download loop).
    host_name = repos.name(args.host)["host"]
    if args.app == "all":
        OpenShopChannel = metadata.API()
        OpenShopChannel.set_host(args.host)
        print(f"Starting download of all packages from \"{args.host}\" @ {repos.name(args.host)['host']}..")
        for package in OpenShopChannel.packages:
            download.get(app_name=package["internal_name"], repo=host_name)
    else:
        download.get(app_name=args.app, repo=host_name)

# Send
if args.cmd == "send":
    # get hostname of host
    host_url = repos.name(args.host)["host"]
    ok = wiiload.validate_ip_regex(ip=args.destination)
    if not ok:
        print(f"Error: The address '{args.destination}' is invalid! Please correct it!")
        exit(1)
    url = f"https://{host_url}/hbb/{args.app}/{args.app}.zip"
    r = requests.get(url)
# Print or download every app listed in the given file.
if args.display is True:
    localcontents.dl_list(file=args.file, display=True, repo=args.host)
else:
    localcontents.dl_list(file=args.file, repo=args.host)

# get command
if args.cmd == 'get':
    # Skip manual approval if specified
    # (removed the original's dead no-op: `if args.output is None:
    #  args.output = None` changed nothing.)
    if args.host is None:
        args.host = "hbb1.oscwii.org"
    if args.extract is None:
        args.extract = False
    if args.noconfirm is True:
        if parsecontents.query(term=args.name, repo=args.host) is False:
            exit(0)
        metadata.get(app_name=args.name, type="default", repo=args.host)
        download.get(app_name=args.name, output=args.output, extract=args.extract, repo=args.host)
    if args.noconfirm is False:
        if parsecontents.query(term=args.name, repo=args.host) is False:
            exit(0)
        download.confirm(app_name=args.name, repo=args.host)
        # NOTE(review): unlike the noconfirm branch, this call omits
        # `extract=args.extract` — confirm whether that is intentional.
        download.get(app_name=args.name, output=args.output, repo=args.host)
# Build the download URL/path for this snapshot tarball.
tarball = 'cargo-nightly-' + triple + '.tar.gz'
url = 'https://static.rust-lang.org/cargo-dist/%s/%s' % \
    (date.strip(), tarball)
dl_path = "target/dl/" + tarball
dst = "target/snapshot"

if not os.path.isdir('target/dl'):
    os.makedirs('target/dl')
if os.path.isdir(dst):
    shutil.rmtree(dst)

# Reuse a previously downloaded tarball when its checksum matches.
exists = False
if os.path.exists(dl_path):
    with open(dl_path, 'rb') as f:  # the original leaked this handle
        h = hashlib.sha1(f.read()).hexdigest()
    if h == hash:
        print("file already present %s (%s)" % (
            dl_path,
            hash,
        ))
        exists = True

if not exists:
    download.get(url, dl_path)

# Verify whatever is on disk before unpacking it.
with open(dl_path, 'rb') as f:  # the original leaked this handle too
    h = hashlib.sha1(f.read()).hexdigest()
if h != hash:
    raise Exception("failed to verify the checksum of the snapshot")

download.unpack(dl_path, dst)
def get_ads(base_url):
    """Scrape the "cpg" listings page at *base_url* and index new ads.

    Each unseen ad is fetched, its text cached under "text:<id>", a row
    inserted into the `ad` table, and its word counts into `tf`.
    """
    c = conn.cursor()
    page = download.get(base_url + "/search/cpg")
    for p in page.select(".row"):
        pid = p['data-pid']
        a_tag = p.find('a', class_='hdrlnk')
        ad_href = a_tag['href']
        ad_title = a_tag.text
        # Parse the listing timestamp into a unix epoch integer.
        dt = p.find('time')['datetime']
        dt = datetime.datetime.strptime(dt, "%Y-%m-%d %H:%M")
        dt = int(dt.strftime("%s"))
        c.execute("SELECT * FROM ad WHERE id = ?", (pid, ))
        row = c.fetchone()
        if row is None:
            # Relative hrefs are resolved against the site base URL.
            url = ad_href
            if not ad_href.startswith('http'):
                url = base_url + ad_href
            time.sleep(0.5)  # throttle requests to the server
            ad = download.get(url)
            print(url)  # was py2 `print url`; this form works on 2 and 3
            ad_text = ad.find(id='postingbody')
            if ad_text is None:
                if ad.find(id='has_been_removed'):
                    continue
                else:
                    # BUGFIX: was `raise "malformed body"` — raising a string
                    # is invalid (TypeError at raise time on any modern Python).
                    raise Exception("malformed body")
            ad_text = ad_text.text.strip()
            # Keep printable characters only; the join keeps this correct on
            # Python 3 too (filter returns an iterator there, a str on py2).
            ad_text = ''.join(filter(lambda x: x in string.printable, ad_text))
            nilsimsa = Nilsimsa(ad_text)
            lshash = nilsimsa.hexdigest()
            seen = generate_word_counts(ad_text)
            cache.write("text:" + pid, ad_text)
            row = (pid, url, ad_title, dt, lshash)
            c.execute(
                "INSERT INTO ad (id, url, title, posted, lshash) " +
                " VALUES (?,?,?,?,?)", row)
            for word in seen:
                if word not in stopwords:
                    row = (pid, word, seen[word])
                    c.execute(
                        "INSERT INTO tf (id, word, cnt) " +
                        "VALUES (?,?,?)", row)
    conn.commit()
import os
import sys
import hashlib

import download

# Date of the nightly to fetch, e.g. "2015-01-01" (first CLI argument).
date = sys.argv[1]
print(date)

if not os.path.isdir('target/dl'):
    os.makedirs('target/dl')

# platform label -> cargo target triple
snaps = {
    'macos-i386': 'i686-apple-darwin',
    'macos-x86_64': 'x86_64-apple-darwin',
    'linux-i386': 'i686-unknown-linux-gnu',
    'linux-x86_64': 'x86_64-unknown-linux-gnu',
    'winnt-i386': 'i686-pc-windows-gnu',
    'winnt-x86_64': 'x86_64-pc-windows-gnu',
    'bitrig-x86_64': 'x86_64-unknown-bitrig',
}

for platform in sorted(snaps):
    triple = snaps[platform]
    tarball = 'cargo-nightly-' + triple + '.tar.gz'
    url = 'https://static.rust-lang.org/cargo-dist/' + date + '/' + tarball
    dl_path = "target/dl/" + tarball
    download.get(url, dl_path, quiet=True)
    # Hash the download so the caller can pin it (the original leaked
    # the file handle here).
    with open(dl_path, 'rb') as f:
        h = hashlib.sha1(f.read()).hexdigest()
    print(' ' + platform + ' ' + h)
#!/usr/bin/env python2
import download
import get_cpg
import re
import random

# Fetch craigslist's master list of regional sites.
page = download.get("http://www.craigslist.org/about/sites")
alst = page.find_all('a')
random.shuffle(alst)
for a in alst:
    # Named anchors delimit country sections; stop at the first non-US one.
    if 'name' in a.attrs:
        if a['name'] != 'US':
            break
    if 'href' in a.attrs:
        # Raw string for the regex (was a plain literal, whose `\.` is an
        # invalid escape sequence on modern Python). The value is unchanged.
        # NOTE(review): the final ".org" dot is unescaped — matches any char.
        g = re.match(r'http://[^\.]+\.craigslist.org', a['href'])
        if g:
            get_cpg.get_ads(g.group(0))
# Build the Windows target triple and locate this date/platform's snapshot.
vendor = "pc"
target_os = "windows-gnu"
triple = "%s-%s-%s" % (arch, vendor, target_os)
hash = snaps[date][platform]  # NOTE(review): shadows the builtin `hash`
tarball = "cargo-nightly-" + triple + ".tar.gz"
url = "https://static.rust-lang.org/cargo-dist/%s/%s" % (date.strip(), tarball)
dl_path = "target/dl/" + tarball
dst = "target/snapshot"

if not os.path.isdir("target/dl"):
    os.makedirs("target/dl")
if os.path.isdir(dst):
    shutil.rmtree(dst)

# Reuse a previously downloaded tarball when its checksum matches.
exists = False
if os.path.exists(dl_path):
    with open(dl_path, "rb") as f:  # the original leaked this handle
        h = hashlib.sha1(f.read()).hexdigest()
    if h == hash:
        print("file already present %s (%s)" % (dl_path, hash))
        exists = True

if not exists:
    download.get(url, dl_path)

# Verify whatever is on disk before unpacking it.
with open(dl_path, "rb") as f:  # the original leaked this handle too
    h = hashlib.sha1(f.read()).hexdigest()
if h != hash:
    raise Exception("failed to verify the checksum of the snapshot")

download.unpack(dl_path, dst, strip=2)
def run(): """ Start a console version of this application. """ # Command line parser options. usage = "usage: %prog [-c|-d|-j|--convert|--rconvert] [options] arg1 arg2 ..." parser = optparse.OptionParser(version=checker.ABOUT, usage=usage) parser.add_option( "--download", "-d", action="store_true", dest="download", help=_("Actually download the file(s) in the metalink"), ) parser.add_option("--check", "-c", action="store_true", dest="check", help=_("Check the metalink file URLs")) # parser.add_option("--file", "-f", dest="filevar", metavar="FILE", help=_("Metalink file to check or file to download")) parser.add_option( "--timeout", "-t", dest="timeout", metavar="TIMEOUT", help=_("Set timeout in seconds to wait for response (default=10)"), ) parser.add_option("--os", "-o", dest="os", metavar="OS", help=_("Operating System preference")) parser.add_option( "--no-segmented", "-s", action="store_true", dest="nosegmented", help=_("Do not use the segmented download method"), ) parser.add_option("--lang", "-l", dest="language", metavar="LANG", help=_("Language preference (ISO-639/3166)")) parser.add_option( "--country", dest="country", metavar="LOC", help=_("Two letter country preference (ISO 3166-1 alpha-2)") ) parser.add_option( "--pgp-keys", "-k", dest="pgpdir", metavar="DIR", help=_("Directory with the PGP keys that you trust (default: working directory)"), ) parser.add_option( "--pgp-store", "-p", dest="pgpstore", metavar="FILE", help=_("File with the PGP keys that you trust (default: ~/.gnupg/pubring.gpg)"), ) parser.add_option( "--gpg-binary", "-g", dest="gpg", help=_("(optional) Location of gpg binary path if not in the default search path"), ) parser.add_option( "--convert-jigdo", "-j", action="store_true", dest="jigdo", help=_("Convert Jigdo format file to Metalink") ) parser.add_option("--port", dest="port", help=_("Streaming server port to use (default: No streaming server)")) parser.add_option("--html", dest="html", help=_("Extract links from HTML webpage")) 
parser.add_option("--convert", dest="convert", action="store_true", help="Conversion from 3 to 4 (IETF RFC)") parser.add_option("--rconvert", dest="rev", action="store_true", help="Reverses conversion from 4 (IETF RFC) to 3") parser.add_option( "--output", dest="output", metavar="OUTFILE", help=_("Output conversion result to this file instead of screen") ) parser.add_option("--rss", "-r", action="store_true", dest="rss", help=_("RSS/Atom Feed Mode, implies -d")) parser.add_option( "--testable", action="store_true", dest="only_testable", help=_("Limit tests to only the URL types we can test (HTTP/HTTPS/FTP)"), ) parser.add_option( "-w", dest="writedir", default=os.getcwd(), help=_("Directory to write output files to (default: current directory)"), ) (options, args) = parser.parse_args() # if options.filevar != None: # args.append(options.filevar) if len(args) == 0: parser.print_help() return socket.setdefaulttimeout(10) proxy.set_proxies() if options.os != None: download.OS = options.os if options.language != None: download.LANG = [].extend(options.language.lower().split(",")) if options.country != None: download.COUNTRY = options.country if options.pgpdir != None: download.PGP_KEY_DIR = options.pgpdir if options.pgpstore != None: download.PGP_KEY_STORE = options.pgpstore if options.port != None: download.PORT = int(options.port) if options.gpg != None: GPG.DEFAULT_PATH.insert(0, options.gpg) if options.timeout != None: socket.setdefaulttimeout(int(options.timeout)) if options.country != None and len(options.country) != 2: print _("Invalid country length, must be 2 letter code") return if options.jigdo: print download.convert_jigdo(args[0]) return if options.convert: text = download.parse_metalink(args[0], ver=4).generate() if options.output: handle = open(options.output, "w") handle.write(text) handle.close() else: print text return if options.rev: text = download.parse_metalink(args[0], ver=3).generate() if options.output: handle = open(options.output, "w") 
handle.write(text) handle.close() else: print text return if options.html: handle = download.urlopen(options.html) text = handle.read() handle.close() page = checker.Webpage() page.set_url(options.html) page.feed(text) for item in page.urls: if item.endswith(".metalink"): print "=" * 79 print item mcheck = checker.Checker() mcheck.check_metalink(item) results = mcheck.get_results() print_totals(results) return if options.check: failure = False for item in args: print "=" * 79 print item mcheck = checker.Checker(options.only_testable) mcheck.check_metalink(item) results = mcheck.get_results() result = print_totals(results) failure |= result sys.exit(int(failure)) if options.download: for item in args: progress = ProgressBar() result = download.get( item, options.writedir, handlers={ "status": progress.download_update, "bitrate": progress.set_bitrate, "time": progress.set_time, }, segmented=not options.nosegmented, ) progress.download_end() if not result: sys.exit(-1) if options.rss: for feed in args: progress = ProgressBar() result = download.download_rss( feed, options.writedir, handlers={ "status": progress.download_update, "bitrate": progress.set_bitrate, "time": progress.set_time, }, segmented=not options.nosegmented, ) progress.download_end() if not result: sys.exit(-1) sys.exit(0)