def video_download(self, video_name, video_url):
    """Download a video to ``<video_name>.mp4`` with a console progress bar.

    Skips URLs already recorded in ``self.urls`` and appends each newly
    downloaded URL to ``self.dumplicate`` (an open file handle) so later
    runs can skip it too.
    """
    # Skip videos that were downloaded in a previous run.
    if video_url in self.urls:
        print("此视频已下载:" + video_name)
        return
    # Replace characters that are illegal in file names to avoid OS errors
    # when creating the output file.
    video_name = re.sub(r'[/\:*?<>|]', '_', video_name)
    with closing(requests.get(video_url, stream=True)) as response:
        chunk_size = 1024
        # NOTE(review): assumes the server always sends Content-Length —
        # a missing header would raise KeyError here.
        content_size = int(response.headers['content-length'])
        if response.status_code == 200:
            print('[文件大小]:%0.2f MB' % (content_size / chunk_size / 1024)
                  + ' ' + video_url)
            download_size = 0
            with open(video_name + '.mp4', 'wb') as file:
                for data in response.iter_content(chunk_size):
                    file.write(data)
                    file.flush()
                    # BUG FIX: count the bytes actually received; the last
                    # chunk is usually shorter than chunk_size, so adding
                    # chunk_size made the rate overshoot 100%.
                    download_size += len(data)
                    download_rate = min(
                        download_size * 100 / content_size, 100)
                    print("[{}{}]{}...{}".format(
                        "#" * int(download_rate),
                        ' ' * (100 - int(download_rate)),
                        download_rate, video_name))
                    sys.stdout.flush()
            # Record the URL so future runs treat it as already downloaded.
            self.dumplicate.write(video_url + '\n')
        # No explicit response.close() needed: closing() handles it.
def test_profile_parser(self):
    """Verify that parse_profile yields the expected table of rows."""
    with contextlib.closing(StringIO()) as stream:
        with contextlib.redirect_stdout(stream):
            cProfile.run('print()')
        stream.seek(0)
        actual = list(parse_profile(stream))
    # Every data row carries four zero timings between ncalls and the
    # function description; build them from one shared list.
    zeros = ['0.000'] * 4
    expected = [
        ['ncalls', 'tottime', 'percall', 'cumtime', 'percall',
         'filename:lineno(function)'],
        ['1'] + zeros + ['<string>:1(<module>)'],
        ['1'] + zeros + ['{built-in method builtins.exec}'],
        ['1'] + zeros + ['{built-in method builtins.print}'],
        ['1'] + zeros + ["{method 'disable' of '_lsprof.Profiler' objects}"],
    ]
    self.assertListEqual(actual, expected)
def insert(self, code, channel, host, zuser, user_mode, date, target_user, message, network):
    """Insert one message row into the chanlog table.

    On any failure the error is reported through ``self.PutModule`` and
    the systemd journal instead of propagating to the caller.
    """
    connstring = self.postgresConnectString()
    try:
        with closing(psycopg2.connect(connstring)) as conn:
            with closing(conn.cursor()) as cursor:
                # Parameterized query: psycopg2 escapes the values, so
                # message content cannot inject SQL.
                cmd = "INSERT INTO chanlog (code, network, channel, host, zuser, user_mode, target_user, message, date) values (%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                cursor.execute(cmd, (code, network, channel, host, zuser,
                                     user_mode, target_user, message, date))
            # BUG FIX: dropped the explicit cursor.close() — it was
            # redundant, closing() already closes the cursor on exit.
            conn.commit()
    except Exception as e:
        self.PutModule(
            "Could not save {0} to database caused by: {1} {2}".format(
                code, type(e), str(e)))
        journal.send(repr(e))
def make_tiny(url):
    """Shorten *url* via the tinyurl.com API; return *url* unchanged on IOError."""
    query = urlencode({'url': url})
    request_url = 'http://tinyurl.com/api-create.php?' + query
    try:
        with contextlib2.closing(urlopen(request_url)) as response:
            body = response.read()
        return body.decode('utf-8')
    except IOError:
        print('error')
        return url
def remote_tunnel(self):
    """Context body: open fab's remote tunnel when configured, then yield.

    While the tunnel is active, sys.stdout is patched to os.devnull so
    fab.remote_tunnel's debug messages are not printed.
    """
    with contextlib.ExitStack() as stack:
        tunnel = self.ssh_tunnel
        if tunnel:
            devnull = stack.enter_context(
                contextlib.closing(open(os.devnull, 'w')))  # noqa
            # Forward sys.stdout to os.devnull for the lifetime of the
            # stack to suppress fab.remote_tunnel's debug output.
            stack.enter_context(utils.patch(sys, 'stdout', devnull))
            stack.enter_context(fab.remote_tunnel(
                remote_bind_address=tunnel.bind_address,
                remote_port=tunnel.port,
                local_host=tunnel.host,
                local_port=tunnel.host_port,
            ))
        yield
def build(files, srs):
    """Build a GDAL VRT over *files* and yield the opened dataset.

    Parameters:
        files: paths of raster inputs; every one must already exist.
        srs: spatial reference assigned to the VRT via ``-a_srs``.

    Raises:
        RuntimeError: when an input file is missing or gdalbuildvrt fails.
    """
    with closing(tempfile.NamedTemporaryFile(suffix='.vrt')) as vrt:
        # Fail fast with a clear message before shelling out.
        for f in files:
            if not os.path.exists(f):
                # BUG FIX: was a bare assert, which is silently stripped
                # when Python runs with -O; raise explicitly instead.
                raise RuntimeError(
                    "Trying to build a VRT including file %r, but it does "
                    "not seem to exist." % f)
        args = ["gdalbuildvrt", "-q", "-a_srs", srs, vrt.name] + files
        status = subprocess.call(args)
        if status != 0:
            raise RuntimeError("Call to gdalbuildvrt failed: status=%r"
                               % status)
        ds = gdal.Open(vrt.name)
        try:
            yield ds
        finally:
            # BUG FIX: release the dataset even when the consumer raises,
            # so the temporary VRT file can be deleted cleanly.
            del ds
def build(files, srs):
    """Build a GDAL VRT over *files* and yield the opened dataset.

    Parameters:
        files: paths of raster inputs; every one must already exist.
        srs: spatial reference assigned to the VRT via ``-a_srs``.

    Raises:
        RuntimeError: when an input file is missing or gdalbuildvrt fails.
    """
    with closing(tempfile.NamedTemporaryFile(suffix='.vrt')) as vrt:
        # Fail fast with a clear message before shelling out.
        for f in files:
            if not os.path.exists(f):
                # BUG FIX: was a bare assert, which is silently stripped
                # when Python runs with -O; raise explicitly instead.
                raise RuntimeError(
                    "Trying to build a VRT including file %r, but it does "
                    "not seem to exist." % f)
        args = ["gdalbuildvrt", "-q", "-a_srs", srs, vrt.name] + files
        status = subprocess.call(args)
        if status != 0:
            raise RuntimeError("Call to gdalbuildvrt failed: status=%r"
                               % status)
        ds = gdal.Open(vrt.name)
        try:
            yield ds
        finally:
            # BUG FIX: release the dataset even when the consumer raises,
            # so the temporary VRT file can be deleted cleanly.
            del ds
def test_profile_parser(self):
    """ Verify that the function parse_profile produces the expected output. """
    with contextlib.closing(io.StringIO()) as stream:
        with contextlib.redirect_stdout(stream):
            cProfile.run('print()')
        stream.seek(0)
        actual = list(parse_profile(stream))
    # The header row is stable across implementations; check it exactly.
    # Data rows vary between CPython and PyPy, so they are matched with
    # regexes instead of literal values.
    exc_header = [
        "ncalls", "tottime", "percall", "cumtime", "percall",
        "filename:lineno(function)"
    ]
    self.assertEqual(actual[0], exc_header)
    # BUG FIX: the original pattern r"\d(.\d+)?" left the dot unescaped
    # (matching any character) and only allowed a single leading digit;
    # escape the dot and accept multi-digit call counts.
    exc_number = re.compile(r"\d+(\.\d+)?")
    exc_module = re.compile(
        r"({method.*})|({built-in.*})|(<.+>:\d+\(<.+>\))")
    exc_row = [exc_number] * 5 + [exc_module]
    for row in actual[1:]:
        for text, expected_regex in zip(row, exc_row):
            # BUG FIX: the msg placeholders were never substituted, so a
            # failure printed the literal "{}"s; format them properly.
            self.assertRegex(
                text, expected_regex,
                msg="Expected something like {} but found {}".format(
                    expected_regex.pattern, text))
def test_profile_parser(self):
    """Verify that parse_profile produces the expected rows per interpreter."""
    with contextlib.closing(StringIO()) as stream:
        with contextlib.redirect_stdout(stream):
            cProfile.run('print()')
        stream.seek(0)
        actual = list(parse_profile(stream))

    header = ['ncalls', 'tottime', 'percall', 'cumtime', 'percall',
              'filename:lineno(function)']

    def row(ncalls, where):
        # All profiled timings round to zero for this trivial run.
        return [ncalls, '0.000', '0.000', '0.000', '0.000', where]

    if PY3:
        if sys.version_info < (3, 5):
            expected = [
                header,
                row('1', '<string>:1(<module>)'),
                row('1', '{built-in method exec}'),
                row('1', '{built-in method print}'),
                row('1', "{method 'disable' of '_lsprof.Profiler' objects}"),
            ]
        else:
            expected = [
                header,
                row('1', '<string>:1(<module>)'),
                row('1', '{built-in method builtins.exec}'),
                row('1', '{built-in method builtins.print}'),
                row('1', "{method 'disable' of '_lsprof.Profiler' objects}"),
            ]
    else:
        expected = [
            header,
            row('1', '<string>:1(<module>)'),
            row('2', 'StringIO.py:208(write)'),
            row('2', 'StringIO.py:38(_complain_ifclosed)'),
            row('2', '{isinstance}'),
            row('2', '{len}'),
            row('2', "{method 'append' of 'list' objects}"),
            row('1', "{method 'disable' of '_lsprof.Profiler' objects}"),
        ]
    self.assertListEqual(actual, expected)
def __extract_ips_from_txt_list(self):
    """Collect IPs from the badReputationIPLists URLs into the temp bad-IP file.

    Each URL is fetched with streaming enabled; every response line is
    matched against ``self.__REGEX_VALIDATE_IP`` and unique matches are
    written to ``self.__TEMP_BAD_IP_LIST_NAME``, one per line.
    """
    ips = []
    # BUG FIX: the file handle is now managed with `with` (the original
    # leaked it on any exception), and the bizarre `global copy` — which
    # leaked each response object into a module-level global — is gone.
    with open(self.__TEMP_BAD_IP_LIST_NAME, 'w+') as outfile:
        for url in badReputationIPLists:
            try:
                assert isinstance(url, str)
                assert ("http://" in url) or ("https://" in url)
                logging.info("Checking URL: " + url)
                response = requests.get(url, stream=True)
            except HTTPError as error_code:
                logging.error("The requested URL: " + url + " has returned"
                              + str(error_code))
                continue
            except AssertionError as e:
                logging.error("An AssertionError occurred: "
                              + str(e.__cause__))
                continue
            with closing(response):
                for line in response.iter_lines():
                    line = line.rstrip().decode('utf-8')
                    match = re.findall(self.__REGEX_VALIDATE_IP, line)
                    # BUG FIX: the original tested `len(regex) > 0 is not
                    # None` (a meaningless chained comparison) and checked
                    # `regex not in ips` while appending regex[0], so
                    # duplicates were never actually filtered.
                    if match and match[0] not in ips:
                        ips.append(match[0])
        for ip in ips:
            # join handles both plain strings and regex group tuples.
            ip_address = "".join(ip)
            if ip_address:
                outfile.write(ip_address + "\n")
def test_profile_parser(self):
    """Verify that parse_profile produces the expected rows per interpreter."""
    with contextlib.closing(StringIO()) as stream:
        with contextlib.redirect_stdout(stream):
            cProfile.run('print()')
        stream.seek(0)
        actual = list(parse_profile(stream))

    header = ['ncalls', 'tottime', 'percall', 'cumtime', 'percall',
              'filename:lineno(function)']

    def row(ncalls, where):
        # All profiled timings round to zero for this trivial run.
        return [ncalls, '0.000', '0.000', '0.000', '0.000', where]

    if PY3:
        if sys.version_info < (3, 5):
            expected = [
                header,
                row('1', '<string>:1(<module>)'),
                row('1', '{built-in method exec}'),
                row('1', '{built-in method print}'),
                row('1', "{method 'disable' of '_lsprof.Profiler' objects}"),
            ]
        else:
            expected = [
                header,
                row('1', '<string>:1(<module>)'),
                row('1', '{built-in method builtins.exec}'),
                row('1', '{built-in method builtins.print}'),
                row('1', "{method 'disable' of '_lsprof.Profiler' objects}"),
            ]
    else:
        expected = [
            header,
            row('1', '<string>:1(<module>)'),
            row('2', 'StringIO.py:208(write)'),
            row('2', 'StringIO.py:38(_complain_ifclosed)'),
            row('2', '{isinstance}'),
            row('2', '{len}'),
            row('2', "{method 'append' of 'list' objects}"),
            row('1', "{method 'disable' of '_lsprof.Profiler' objects}"),
        ]
    self.assertListEqual(actual, expected)
#!/usr/bin/env python
"""Adds InCommon SAML federation metadata to `samlfederations` table."""
import os
import sys

from contextlib2 import closing

# Make the package importable when the script is run from bin/.
bin_dir = os.path.split(__file__)[0]
package_dir = os.path.join(bin_dir, "..")
sys.path.append(os.path.abspath(package_dir))

from api.saml.metadata.federations import incommon
from api.saml.metadata.federations.model import SAMLFederation
from core.model import production_session

with closing(production_session()) as db:
    # Look up an existing InCommon federation record, if any.
    query = db.query(SAMLFederation).filter(
        SAMLFederation.type == incommon.FEDERATION_TYPE)
    incommon_federation = query.one_or_none()

    if not incommon_federation:
        # None found: create the InCommon federation row.
        incommon_federation = SAMLFederation(
            incommon.FEDERATION_TYPE,
            incommon.IDP_METADATA_SERVICE_URL,
            incommon.CERTIFICATE,
        )
        db.add(incommon_federation)

    db.commit()
def get(url, options=None):
    """
    Download a file to a temporary directory, returning it.

    The options provided will control the behaviour of the download
    algorithm.

    * 'tries' - The maximum number of tries to download the file before
      giving up and raising an exception.
    * 'timeout' - Timeout in seconds before considering the connection to
      have failed.
    * 'verifier' - A function which is called with a filelike object. It
      should return True if the file is okay and appears to be fully
      downloaded.
    * 'backoff' - Optional function called with the number of attempts
      since the last forward progress, to delay between retries.
    """
    # BUG FIX: the default was a shared mutable dict (options={}); use
    # None as the default and normalize here instead.
    if options is None:
        options = {}

    logger = logging.getLogger('download')

    with closing(tempfile.NamedTemporaryFile()) as tmp:
        # current file position = number of bytes read
        filepos = 0
        # file size when downloaded, if known
        filesize = None
        # number of attempts so far
        tries = 0
        # last try which resulted in some forward progress (i.e: filepos
        # got bigger)
        last_successful_try = 0
        # maximum number of attempts to make
        max_tries = options.get('tries', 1)
        # timeout for blocking operations (e.g: connect) in seconds
        timeout = options.get('timeout', 60)
        # verifier function
        verifier = options.get('verifier')
        # backoff function - to delay between retries
        backoff = options.get('backoff')
        # whether the server supports Range headers (if it doesn't we'll
        # have to restart from the beginning every time).
        accept_range = False

        # we need to download _something_ if the file position is less
        # than the known size, or the size is unknown.
        while filesize is None or filepos < filesize:
            # explode if we've exceeded the number of allowed attempts
            if tries >= max_tries:
                raise DownloadFailedError("Max tries exceeded (%d) while "
                                          "downloading file %r"
                                          % (max_tries, url))
            else:
                if backoff and tries > last_successful_try:
                    backoff(tries - last_successful_try)
                tries += 1

            req = urllib2.Request(url)

            # if the server supports accept range, and we have a partial
            # download then attempt to resume it.
            if accept_range and filepos > 0:
                logger.info("Continuing (try %d/%d) at %d bytes: %r"
                            % (tries, max_tries, filepos, url))
                assert filesize is not None
                req.headers['Range'] = 'bytes=%s-%s' \
                    % (filepos, filesize - 1)
            else:
                # otherwise, truncate the file in readiness to download
                # from scratch.
                logger.info("Downloading (try %d/%d) %r"
                            % (tries, max_tries, url))
                filepos = 0
                tmp.seek(0, os.SEEK_SET)
                tmp.truncate(0)

            try:
                f = urllib2.urlopen(req, timeout=timeout)

                # try to get the filesize, if the server reports it.
                if filesize is None:
                    content_length = f.info().get('Content-Length')
                    if content_length is not None:
                        try:
                            filesize = int(content_length)
                        except ValueError:
                            pass

                # detect whether the server accepts Range requests.
                accept_range = f.info().get('Accept-Ranges') == 'bytes'

                # copy data from the server
                shutil.copyfileobj(f, tmp)

            except (IOError, httplib.HTTPException) as e:
                logger.debug("Got HTTP error: %s" % str(e))
                continue

            except ftplib.all_errors as e:
                logger.debug("Got FTP error: %s" % str(e))
                continue

            except socket.timeout as e:
                logger.debug("Got socket timeout: %s" % str(e))
                continue

            # update number of bytes read (this would be nicer if
            # copyfileobj returned it).
            old_filepos = filepos
            filepos = tmp.tell()
            if filepos > old_filepos:
                last_successful_try = tries

            # if we don't know how large the file is supposed to be, then
            # verify it every time.
            if filesize is None and verifier is not None:
                # reset tmp file to beginning for verification
                tmp.seek(0, os.SEEK_SET)
                if verifier(tmp):
                    break
                # no need to reset here - since filesize is none, then
                # we'll be downloading from scratch, which will truncate
                # the file.

        # verify the file, if it hasn't been verified before
        if filesize is not None and verifier is not None:
            # reset tmp file to beginning for verification
            tmp.seek(0, os.SEEK_SET)
            if not verifier(tmp):
                raise DownloadFailedError("File downloaded from %r failed "
                                          "verification" % url)

        tmp.seek(0, os.SEEK_SET)
        yield tmp
def make_tiny(url):
    """Return the tinyurl.com short link for *url*."""
    query = urlencode({'url': url})
    request_url = 'http://tinyurl.com/api-create.php?' + query
    with contextlib2.closing(urlopen(request_url)) as response:
        body = response.read()
    return body.decode('utf-8')