def __init__(self, dbname=None, debug=False):
    self.debug = debug
    self.header = headers
    create_tb = '''
        CREATE TABLE IF NOT EXISTS PROXY
        (DATE DATETIME NOT NULL,
         IP CHARACTER(15),
         PORT INTEGER,
         REGION TEXT NOT NULL,
         PRIMARY KEY(IP, PORT)
        );
        '''

    if dbname is None or dbname == RESERVERD_DB_NAME:
        dbname = os.path.join(resource_path, RESERVERD_DB_NAME)
    try:
        conn = sqlite3.connect(dbname)
    except sqlite3.OperationalError as opex:
        color.print_err(opex)
        color.print_err('dbname : {0:s}'.format(dbname))
        raise  # re-raise the original error rather than a bare Exception
    else:
        self.conn = conn
        self.dbname = dbname
        conn.execute(create_tb)
        color.print_ok('connect to the db {0}'.format(dbname))

    # prefer the lxml-based parser when lxml is installed
    try:
        import lxml  # noqa: F401  (availability check only)
    except ImportError:
        self.parse_ip_port_region = proxy_ip.parse_ip_port_region_httpparser
    else:
        self.parse_ip_port_region = proxy_ip.parse_ip_port_region_lxml
def cut(fn, output, start_time, end_time, debug=False):
    if not assert_output_has_ext(output):
        color.print_err('Failed.')
        return

    start_time_int = video_time_str2int(start_time)
    end_time_int = video_time_str2int(end_time)
    duration = end_time_int - start_time_int
    if duration <= 0:
        color.print_err('end-time %s is not after start-time %s'
                        % (end_time, start_time))
        return

    fn_tmp = path2uuid(fn)
    output_tmp = path2uuid(output, rename=False, quiet=True)
    try:
        cmd = 'ffmpeg -ss %d -i "%s" -t %d -c:v copy -c:a copy "%s" ' \
              % (start_time_int, fn_tmp, duration, output)
        info_lines, err_lines = exec_cmd(cmd)
        if debug:
            print(cmd)
            print('Info: %s' % '\n'.join(err_lines))
        path2uuid(output_tmp, d=True, rename=False)
    except Exception:
        raise
    else:
        color.print_ok('cut the video %s to %s from %s to %s'
                       % (fn, output, start_time, end_time))
    finally:
        path2uuid(fn_tmp, d=True)
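# Usage sketch (hypothetical file names; assumes video_time_str2int() parses
# 'HH:MM:SS'-style timestamps into seconds, as the code above suggests):
#
#     cut('input.mp4', 'clip.mp4', '00:01:30', '00:02:45')
#
# With stream copy (-c:v copy -c:a copy) the cut snaps to the nearest
# keyframe, so the clip may start slightly before the requested start time.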
def extract(fn, output, type, **other_kwargs):
    if not assert_output_has_ext(output):
        color.print_err('Failed.')
        return

    fn_tmp = path2uuid(fn, quiet=True)
    output_tmp = path2uuid(output, quiet=True, rename=False)

    extract_cmd_list = ['ffmpeg', '-i', fn_tmp]
    if type == 'audio':
        extract_cmd_list.extend(['-acodec', 'copy', '-vn', output_tmp])
    elif type == 'video':
        extract_cmd_list.extend(['-vcodec', 'copy', '-an', output_tmp])
    elif type == 'subtitle':
        extract_cmd_list.extend(['-scodec', 'copy', '-an', '-vn', output_tmp])
    elif type == 'frame':
        start_time = video_time_str2int(other_kwargs['start-time'])
        extract_cmd_list.extend(['-y', '-f', 'image2', '-ss', str(start_time),
                                 '-vframes', '1', output_tmp])
    else:
        color.print_err('error type: %s' % type)
        return

    # print(extract_cmd_list)
    for line in CommandRunner.run(' '.join(extract_cmd_list)):
        print(line)

    path2uuid(fn_tmp, d=True)
    try:
        path2uuid(output_tmp, d=True)
    except Exception:
        path2uuid(output_tmp, d=True, rename=True)
        color.print_err('extract Failed.')
    else:
        color.print_ok('extract Done.')
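# Usage sketch (hypothetical file names): grab a single frame as an image;
# the 'frame' branch above expands to roughly
# `ffmpeg -i <in> -y -f image2 -ss 90 -vframes 1 <out>`:
#
#     extract('in.mp4', 'frame.png', 'frame', **{'start-time': '00:01:30'})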
def cli():
    arguments = docopt(__doc__, version=minghu6.__version__)
    pattern = arguments['<pattern>']
    for fn in os.listdir(os.curdir):
        if fn == '.path2uuid.sqlite3':
            continue
        if fnmatch.fnmatch(fn, pattern) or fn == pattern:
            res = path2uuid(fn, d=arguments['-d'])
            if res is None:
                color.print_info('%s: nothing to do' % fn)
            else:
                color.print_ok('convert %s to %s' % (fn, res))
def main(n, ext):
    n = int(n)
    for i in range(n):
        # path = sys.stdin.readline().strip()  # encoding already specified
        path = input().strip()
        newpath = path + ext
        try:
            with Image.open(path) as img_obj:
                img_obj.save(newpath)
            # remove only after the image file is closed (required on Windows)
            os.remove(path)
        except Exception as ex:
            color.print_err(ex)
        else:
            color.print_ok('fetched %s. no.%d' % (newpath, i + 1))
def isAlive(self, ip, port, region='中国大陆', test_url=None,
            timeout=4, allow_delete=True):
    proxy = {'http': '{0}:{1}'.format(ip, port)}
    print(proxy['http'], region)
    inside = {'中国大陆', 'china', 'taiwan', '台湾'}

    # note: install_opener applies this proxy handler globally
    proxy_support = urllib.request.ProxyHandler(proxy)
    opener = urllib.request.build_opener(proxy_support)
    urllib.request.install_opener(opener)

    # google.com does not work here ...
    if test_url is None:
        test_url = "http://www.qq.com"
    req = urllib.request.Request(test_url, headers=headers)
    try:
        resp = urllib.request.urlopen(req, timeout=timeout)
        if resp.code == 200:
            import bs4
            content = resp.read()
            soup = bs4.BeautifulSoup(content, 'html.parser')
            # some gateways answer 200 with an "unauthorized" page
            s = soup.find('h1')
            if s is not None and s.contents[0].lower().find('unauthorized') != -1:
                color.print_err("Can't use")
                return False
            else:
                color.print_ok("work")
                # print(resp.read())
                return True
        else:
            color.print_err("not work")
            return False
    except Exception as ex:
        color.print_err("Not work")
        if self.debug:
            color.print_err(ex)
        return False
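# Sketch (helper name is hypothetical, not part of the original class): a
# request-scoped variant of the check above. Calling opener.open() directly
# keeps the proxy local to this request instead of mutating global urllib
# state via install_opener().
def check_proxy_scoped(ip, port, test_url='http://www.qq.com', timeout=4):
    import urllib.request
    opener = urllib.request.build_opener(
        urllib.request.ProxyHandler({'http': '{0}:{1}'.format(ip, port)}))
    try:
        # any 200 response fetched through the proxy counts as "alive" here
        return opener.open(test_url, timeout=timeout).getcode() == 200
    except Exception:
        return False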
def cli():
    global GLOBAL_MODE
    global CONFIG_DIR
    global TAG_LIST_CONFIG_PATH
    global SEARCH_PATTERN_CONFIG_PATH

    arguments = docopt(__doc__, version=__version__)
    GLOBAL_MODE = bool(arguments['--global'])
    if GLOBAL_MODE:
        if iswin():
            CONFIG_DIR = os.path.join(os.path.dirname(get_home_dir()),
                                      'All Users', '.jvirtualenv.d')
        else:
            CONFIG_DIR = '/etc/jvirtualenv.d'
    else:
        CONFIG_DIR = os.path.join(get_home_dir(), '.jvirtualenv.d')

    TAG_LIST_CONFIG_PATH = os.path.join(CONFIG_DIR, 'tag-list.json')
    SEARCH_PATTERN_CONFIG_PATH = os.path.join(CONFIG_DIR, 'search-pattern.json')

    if arguments['list-tag']:
        pretty_print_config(get_config())
    elif arguments['reinit-tag']:
        if not iswin():
            with sh.contrib.sudo:
                sh.updatedb()
        init_config()
        color.print_ok('reinit config in %s' % TAG_LIST_CONFIG_PATH)
    elif arguments['--java']:
        version_info = find_version(arguments['--java'])
        if version_info is None:
            color.print_err('No matched tag')
            return
        project_path = arguments['<project>']
        write_activate_file(project_path, version_info['home'],
                            version_info['tag'], bool(arguments['--force']))
def start(self):
    color.print_info('start analyse..., url {0:s}'.format(self.baseURL))
    content = self.getPage(0)
    pageNum = self.getPageNum(content)
    title = self.getTitle(content)
    self.openFile(title)

    # write the LZ (thread starter) id and name
    pattern = re.compile('<div id="post_content_.*?>(.*?)</div>')
    items = re.finditer(pattern, content)
    item = next(items)
    id_pattern = r'(?<=post_content_)(\d)+(?=")'
    lz_id = re.search(id_pattern, item.group(0)).group(0)
    # print(lz_id)
    lz_name = BDTB.get_name_by_id(lz_id)

    splitLine = '=' * 80 + '\n'
    self.file.write(splitLine)
    self.file.write('LZ {0} {1}\n'.format(lz_id, lz_name))
    self.file.write(splitLine)

    if pageNum is None:
        color.print_err('the URL {0:s} might be invalid'.format(self.baseURL))
        return

    try:
        color.print_info('This thread {0:s} has {1:d} pages'.format(title, pageNum))
        for i in range(1, int(pageNum) + 1):
            color.print_info('write to page {0:d}'.format(i))
            page = self.getPage(i)
            content = self.getContent(page)
            self.writeData(content)
    except IOError as e:  # a write error occurred
        color.print_err(e)
    else:
        color.print_ok("Successful!")
    finally:
        self.closeFile()
def try_get_root(self, num, in_out='nn', timeout=5):
    # 'nn' lists domestic high-anonymity proxies
    url = "http://www.xicidaili.com/{0}/{1:d}".format(in_out, num)
    req = urllib.request.Request(url, headers=headers)

    result = proxy_ip.install_proxy_opener(test_url=url)
    if result is not None:
        try:
            resp = urllib.request.urlopen(req, timeout=timeout)
        except Exception as ex:
            color.print_err(ex)
            if self.debug:
                traceback.print_stack()
            return None
        else:
            color.print_ok('Connect server {0} OK!'.format(url))
            return resp

    try:
        # install_opener applies globally; an empty proxy dict forces a
        # direct connection (None would pick up environment proxy settings)
        proxy_support = urllib.request.ProxyHandler(proxies={})
        opener = urllib.request.build_opener(proxy_support)
        urllib.request.install_opener(opener)
        resp = urllib.request.urlopen(req, timeout=timeout)
    except urllib.error.URLError:
        return None
    except Exception as ex:
        color.print_err(ex)
        if self.debug:
            traceback.print_stack()
        return None
    else:
        color.print_ok('{0} Connect server {1} OK!'.format('origin ip', url))
        return resp
def cli():
    arguments = docopt(__doc__, version=__version__)
    if not in_toplevel_of_repo():
        color.print_err('Not in toplevel of a git repo.')
        return

    if arguments['list']:
        n = int(arguments['-n'])
        result = max_file_hash_name(n)
        if result is not None:
            for item in result:
                color.print_info(item)
    elif arguments['rm']:
        path_pattern = arguments['<path-pattern>']
        force = bool(arguments['-f'])
        result = remove_from_history(path_pattern, force)
        color.print_ok(result)
        color.print_warn('run `untrack confirm` to confirm the op')
    elif arguments['reset']:
        try:
            reset()
        except ErrorReturnCode_1 as ex:
            color.print_err(ex)
        except RebundantResetException as ex:
            color.print_warn(ex)
        else:
            color.print_ok('reset.')
    elif arguments['confirm']:
        confirm_remove()
        color.print_ok('confirm remove.')
def get_config():
    if not has_config_file():
        init_config()
        color.print_ok('init config in %s' % TAG_LIST_CONFIG_PATH)
    return json_load()
def merge(pattern_list, output, type, **other_kwargs):
    isprefix = other_kwargs.get('isprefix', False)
    if not assert_output_has_ext(output):
        color.print_err('Failed.')
        return

    base_dir = os.curdir
    merge_file_list = []
    if type in ('video', 'audio', 'gif'):
        for fn in os.listdir(base_dir):
            if os.path.isdir(fn):
                continue
            if fn == '.path2uuid.sqlite3':
                continue
            for pattern in pattern_list:
                if isprefix:
                    if fn.lower().startswith(pattern.lower()):
                        merge_file_list.append(fn)
                else:
                    if fnmatch.fnmatch(fn, pattern):
                        merge_file_list.append(fn)
    else:  # 'va', 'vs'
        merge_file_list = pattern_list

    # common_prefix_pattern = r'^(\w)+\+$'
    if isprefix and len(pattern_list) == 1:
        def key(fn):
            base = os.path.splitext(os.path.basename(fn))[0]
            return LooseVersion(base.split(pattern_list[0])[1])
    elif type in ('va', 'vs'):
        key = lambda fn: 0  # keep the given order
    else:
        key = lambda fn: fn

    merge_file_list = sorted(merge_file_list, key=key)

    color.print_info('The following files will be merged in order')
    for i, file_to_merge in enumerate(merge_file_list):
        color.print_info('%3d. %s' % (i, file_to_merge))
    if len(merge_file_list) <= 1:
        color.print_info('Do nothing.')
        return

    args = input('press enter to continue, q to quit')
    if args in ('q', 'Q'):
        return

    merge_file_tmp_list = [path2uuid(fn, quiet=True) for fn in merge_file_list]
    merge_file_tmp_list2 = []
    if type == 'video':
        # check whether the videos can be merged without re-encoding
        FileInfo = namedtuple('FileInfo', ['width', 'height', 'fps'])
        merge_file_info_list = []
        for fn in merge_file_tmp_list:
            json_obj = load_video_info_json(fn)
            video_site, audio_site = get_video_audio_info_site_injson(json_obj)
            codec_name = json_obj['streams'][video_site]['codec_name']
            width = int(json_obj['streams'][video_site]['width'])
            height = int(json_obj['streams'][video_site]['height'])
            fps = round(load_fps_from_json(json_obj), 3)
            merge_file_info_list.append(FileInfo(width, height, fps))

        if not each_same(merge_file_info_list,
                         key=lambda x: (x.width, x.height, x.fps)):
            color.print_err('width, height and fps must be the same '
                            'for all videos')
            min_width = min(info.width for info in merge_file_info_list)
            min_height = min(info.height for info in merge_file_info_list)
            min_resolution = '%dx%d' % (min_width, min_height)
            min_fps = min(info.fps for info in merge_file_info_list)
            color.print_warn('all_to_resolution: %s' % min_resolution)
            color.print_warn('all_to_fps: %s' % min_fps)
            if askyesno('convert to fix?'):
                merge_file_tmp_list2 = [add_postfix(fn, 'tmp')
                                        for fn in merge_file_tmp_list]
                for fn_pair in zip(merge_file_tmp_list, merge_file_tmp_list2):
                    convert(*fn_pair, size=min_resolution, fps=min_fps)
            else:
                return
    elif type in ('audio', 'va', 'gif'):
        pass  # no pre-processing needed

    output_tmp = path2uuid(output, rename=False, quiet=True)
    if len(merge_file_tmp_list2) == 0:
        input_file_list = merge_file_tmp_list
    else:
        input_file_list = merge_file_tmp_list2  # only for merging video
    try:
        with open('.mylist', 'w') as fw:
            for fn in input_file_list:
                fw.write("file '%s' \n" % fn)

        if type in ('video', 'audio'):
            merge_cmd = 'ffmpeg -f concat -i %s -c copy %s' % ('.mylist', output_tmp)
        elif type == 'va':
            merge_cmd = 'ffmpeg -i %s -i %s -vcodec copy -acodec copy %s' \
                        % (input_file_list[0], input_file_list[1], output_tmp)
        elif type == 'vs':
            with open(input_file_list[1], 'rb') as f_subtitle:
                encoding = guess_charset(f_subtitle)['encoding']
            if encoding.lower() not in ('utf-8', 'ascii'):
                info, err = exec_cmd('%s -m minghu6.tools.text convert %s utf-8'
                                     % (sys.executable, input_file_list[1]))
                if len(err) > 1 or err[0] != '':  # exec failed
                    color.print_err('error codec of the subtitle %s (need utf-8)'
                                    % input_file_list[1])
                    return
            merge_cmd = 'ffmpeg -i %s -vf subtitles=%s %s' \
                        % (input_file_list[0], input_file_list[1], output_tmp)
        elif type == 'gif':
            framerate = other_kwargs['framerate']
            merge_cmd = 'ffmpeg -f image2 -framerate %d -i %s %s' \
                        % (int(framerate), '.mylist', output_tmp)

        for line in CommandRunner.run(merge_cmd):
            print(line)
        path2uuid(output_tmp, d=True)
    except Exception:
        raise
    else:
        color.print_ok('Done.')
    finally:
        try:
            os.remove('.mylist')
        except OSError:
            pass
        for fn in input_file_list:
            path2uuid(fn, d=True)
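# Sketch (helper name and file names are hypothetical, not in the original
# module) of what merge() feeds ffmpeg: the concat demuxer reads one
# "file '<name>'" line per input, and '-c copy' then stitches the streams
# without re-encoding.
def write_concat_list(list_path, input_files):
    with open(list_path, 'w') as fw:
        for fn in input_files:
            fw.write("file '%s'\n" % fn)

# e.g. write_concat_list('.mylist', ['part-01.mp4', 'part-02.mp4'])
# then: ffmpeg -f concat -i .mylist -c copy merged.mp4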
def cli():
    arguments = docopt(__doc__, version=minghu6.__version__)
    path_list = arguments['<filename>']
    try:
        fr_list = [open(path, 'rb') for path in path_list]
    except FileNotFoundError:
        color.print_err('%s not found' % path_list)
        return

    if arguments['charset']:
        fr = fr_list[0]
        result = fileecho.guess_charset(fr)
        encoding, confidence = result['encoding'], result['confidence']
        if encoding is None:
            color.print_err('unknown')
        else:
            color.print_info('{0}, {1:.2f}'.format(encoding, confidence))
        fr.close()
    elif arguments['convert']:
        fr = fr_list[0]
        path = path_list[0]
        to_charset = arguments['<to_charset>']
        from_charset = arguments['--from_charset']
        if from_charset is None:
            result = fileecho.guess_charset(fr)
            encoding, confidence = result['encoding'], result['confidence']
            if confidence is None:
                color.print_err('unknown from_charset, '
                                'you must specify it explicitly')
                return
            elif confidence < 0.7:
                color.print_warn('uncertain from_charset, maybe %s\n'
                                 'you must specify it explicitly' % encoding)
                return
            else:
                from_charset = encoding

        # write to a temp file in the same directory, then copy over the
        # target (os.rename() requires both paths on the same drive on Windows)
        dir = os.path.dirname(os.path.abspath(path))
        fwn = tempfile.mktemp(dir=dir)
        with open(fwn, 'wb') as fw:
            for line in fr:
                fw.write(line.decode(from_charset, errors='ignore')
                             .encode(to_charset, errors='ignore'))
        fr.close()
        if arguments['--output'] is None:
            shutil.copy(fwn, path)
        else:
            shutil.copy(fwn, arguments['--output'])
        os.remove(fwn)
    elif arguments['merge']:
        if arguments['--regex'] is not None:
            # color.print_info(arguments)
            merge_file_path_list = findlist(startdir=os.curdir,
                                            pattern=arguments['--regex'],
                                            regex_match=True,
                                            dosort=True)
        else:
            merge_file_path_list = arguments['<filename>']

        with open(arguments['--output'], 'wb') as outfile:
            for infile_path in merge_file_path_list:
                with open(infile_path, 'rb') as infile:
                    outfile.write(infile.read())
                    outfile.write(b'\n')
                color.print_ok('have merged file %s' % infile_path)
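# Minimal sketch (helper name is hypothetical) of the re-encoding step that
# the `convert` branch above performs line by line; errors='ignore' silently
# drops bytes that cannot be mapped between the two charsets.
def reencode(raw_bytes, from_charset, to_charset):
    return (raw_bytes.decode(from_charset, errors='ignore')
                     .encode(to_charset, errors='ignore'))

# e.g. reencode('你好\n'.encode('gbk'), 'gbk', 'utf-8')
#      -> b'\xe4\xbd\xa0\xe5\xa5\xbd\n'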