def install_proxy_opener(dbname=None, test_url=None, region=None, timeout=4,
                         allow_delete=True):
    """Install the first working HTTP proxy from the pool into urllib.

    Walks the proxies stored in the database; dead ones are removed,
    and the first live one is installed as urllib's global opener, so
    every later ``urllib.request.urlopen()`` call goes through it.

    :param dbname: proxy database name, forwarded to ``proxy_ip``
    :param test_url: URL used by the liveness probe
    :param region: restrict candidates to this region
    :param timeout: liveness-probe timeout in seconds
    :param allow_delete: forwarded to ``isAlive``
    :return: the installed ``(ip, port)`` pair, or ``None`` when no
             candidate is alive
    """
    pool = proxy_ip(dbname=dbname)
    for ip, port in pool.get_ip_port(region=region):
        candidate = {'http': '{0}:{1}'.format(ip, port)}
        color.print_info('\ntry ')
        alive = pool.isAlive(ip, port, test_url=test_url,
                             allow_delete=allow_delete, timeout=timeout)
        if not alive:
            pool.delete_db(ip, port)
            color.print_warn('not work, delete in db')
            continue
        # Installing the opener is a process-global effect.
        handler = urllib.request.ProxyHandler(candidate)
        urllib.request.install_opener(urllib.request.build_opener(handler))
        return ip, port
    return None
def main(path, n, encoding=None, no_more=False):
    """Print the last *n* lines of *path*, decoding them first.

    The codec is taken from *encoding* when given; otherwise chardet's
    guess is used when its confidence exceeds 0.7, and the user is
    warned (and nothing is printed) when the guess is too uncertain.

    :param path: file to read
    :param n: number of trailing lines to show
    :param encoding: explicit codec, overrides detection
    :param no_more: print everything at once instead of paging
    """
    try:
        with open(path, 'rb') as f:
            raw = b'\n'.join(fileecho.tail(f, n))
            guess = chardet.detect(raw)
            if encoding is not None:
                codec = encoding
            elif guess['confidence'] > 0.7:
                codec = guess['encoding']
            else:
                color.print_warn('Not Known encoding, may be %s.\n'
                                 'Please point it explictly'
                                 % guess['encoding'])
                return
            text = raw.decode(codec, errors='ignore')
            if no_more:
                color.print_info(text)
            else:
                more(text, print_color=True)
    except FileNotFoundError:
        color.print_err('%s not found' % path)
    except PermissionError:
        color.print_err('Permission denied: %s' % path)
def check_module(module_name, install_name=''):
    """
    Check that the module is importable; if not, try to install it
    through pip (``pip3`` on Python 3, ``pip`` on Python 2). You can
    provide the pip install name manually when it differs from the
    import name.

    :param module_name: module to import-check
    :param install_name: pip package name, defaults to ``module_name``
    :return: None
    """
    try:
        import_module(module_name)
        return  # already importable, nothing to do
    except ImportError:
        pass

    color.print_warn(module_name, 'Not Exists')
    if ispython3():
        pip_name = 'pip3'
    elif ispython2():
        pip_name = 'pip'
    else:
        pip_name = ''
    color.print_info(
        'Now, try to install through {}, wait please...:)'.format(pip_name))
    if install_name in ('', None):
        install_name = module_name
    info_lines, err_lines = exec_cmd(
        '{0} install {1}'.format(pip_name, install_name))
    print('\n'.join(info_lines))
    if err_lines:
        print(''.join(err_lines))
def getPage(self, pageNum):
    """Download one page of the thread and return it decoded as UTF-8.

    :param pageNum: page number appended as the ``pn`` query argument
    :return: the page body as a str, or ``None`` when the connection
             fails
    """
    # Build the page URL.
    url = self.baseURL + self.seeLZ_str + '&pn=' + str(pageNum)
    try:
        request = urllib.request.Request(url, headers=headers)
        content = None
        # Bug fix: the original looped forever on a persistent
        # IncompleteRead, discarded the partial body carried by the
        # exception, and re-read an already-exhausted response. Retry
        # a bounded number of times by re-issuing the request, and
        # salvage ``ex.partial`` so a truncated page is still returned.
        for _attempt in range(3):
            try:
                response = urllib.request.urlopen(request, timeout=600)
                content = response.read()
            except http.client.IncompleteRead as ex:
                color.print_warn(ex)
                content = ex.partial  # keep what was received so far
                color.print_info('retry...')
            else:
                break
        # Return the content as UTF-8 text.
        return content.decode('utf-8')
    # Connection failed: report and signal the failure to the caller.
    except urllib.error.URLError as e:
        if hasattr(e, "reason"):
            color.print_err(
                "Failed to connect to BaiDuTieBa, Error Reason", e.reason)
        return None
def install_proxy_opener(dbname=None, test_url=None, region=None, timeout=4,
                         allow_delete=True):
    """Install the first working HTTP proxy from the pool into urllib.

    Iterates the proxies in the database, deleting the dead ones, and
    installs the first live one as urllib's global opener; after that
    plain ``urllib.request.urlopen()`` calls use the proxy.

    :param dbname: proxy database name, forwarded to ``proxy_ip``
    :param test_url: URL used by the liveness probe
    :param region: restrict candidates to this region
    :param timeout: liveness-probe timeout in seconds
    :param allow_delete: forwarded to ``isAlive``
    :return: the installed ``(ip, port)`` pair, or ``None`` when no
             candidate is alive
    """
    pool = proxy_ip(dbname=dbname)
    for ip, port in pool.get_ip_port(region=region):
        candidate = {'http': '{0}:{1}'.format(ip, port)}
        color.print_info('\ntry ')
        if pool.isAlive(ip, port, test_url=test_url,
                        allow_delete=allow_delete, timeout=timeout):
            # Installing the opener is a process-global effect.
            handler = urllib.request.ProxyHandler(candidate)
            urllib.request.install_opener(
                urllib.request.build_opener(handler))
            return ip, port
        pool.delete_db(ip, port)
        color.print_warn('not work, delete in db')
    return None
def main(path, n, encoding=None, no_more=False):
    """Print the first *n* lines of *path*, decoding them first.

    The codec is taken from *encoding* when given; otherwise chardet's
    guess is used when its confidence exceeds 0.7, and the user is
    warned (and nothing is printed) when the guess is too uncertain.

    :param path: file to read
    :param n: number of leading lines to show
    :param encoding: explicit codec, overrides detection
    :param no_more: print everything at once instead of paging
    """
    try:
        with open(path, 'rb') as f:
            raw = b'\n'.join(fileecho.head(f, n))
            guess = chardet.detect(raw)
            if encoding is not None:
                codec = encoding
            elif guess['confidence'] > 0.7:
                codec = guess['encoding']
            else:
                color.print_warn('Not Known encoding, may be %s.\n'
                                 'Please point it explictly'
                                 % guess['encoding'])
                return
            text = raw.decode(codec, errors='ignore')
            if no_more:
                color.print_info(text)
            else:
                more(text, print_color=True)
    except FileNotFoundError:
        color.print_err('%s not found' % path)
    except PermissionError:
        color.print_err('Permission denied: %s' % path)
def getPage(self, pageNum):
    """Download one page of the thread and return it decoded as UTF-8.

    :param pageNum: page number appended as the ``pn`` query argument
    :return: the page body as a str, or ``None`` when the connection
             fails
    """
    # Build the page URL.
    url = self.baseURL + self.seeLZ_str + '&pn=' + str(pageNum)
    try:
        request = urllib.request.Request(url, headers=headers)
        content = None
        # Bug fix: the original looped forever on a persistent
        # IncompleteRead, discarded the partial body carried by the
        # exception, and re-read an already-exhausted response. Retry
        # a bounded number of times by re-issuing the request, and
        # salvage ``ex.partial`` so a truncated page is still returned.
        for _attempt in range(3):
            try:
                response = urllib.request.urlopen(request, timeout=600)
                content = response.read()
            except http.client.IncompleteRead as ex:
                color.print_warn(ex)
                content = ex.partial  # keep what was received so far
                color.print_info('retry...')
            else:
                break
        # Return the content as UTF-8 text.
        return content.decode('utf-8')
    # Connection failed: report and signal the failure to the caller.
    except urllib.error.URLError as e:
        if hasattr(e, "reason"):
            color.print_err(
                "Failed to connect to BaiDuTieBa, Error Reason", e.reason)
        return None
def cli():
    """Entry point of the untrack command.

    Dispatches on the docopt sub-command: ``list`` shows the n largest
    tracked files, ``rm`` schedules a path pattern for removal from
    history, ``reset`` undoes a pending removal, and ``confirm``
    finalizes it. Refuses to run outside a git repository toplevel.
    """
    arguments = docopt(__doc__, version=__version__)
    if not in_toplevel_of_repo():
        color.print_err('Not in toplevel of a git repo.')
        return

    if arguments['list']:
        result = max_file_hash_name(int(arguments['-n']))
        if result is not None:
            for item in result:
                color.print_info(item)
    elif arguments['rm']:
        force = bool(arguments['-f'])
        result = remove_from_history(arguments['<path-pattern>'], force)
        color.print_ok(result)
        color.print_warn('run `untrack confirm` to confirm the op')
    elif arguments['reset']:
        try:
            reset()
        except ErrorReturnCode_1 as ex:
            color.print_err(ex)
        except RebundantResetException as ex:
            color.print_warn(ex)
        else:
            color.print_ok('reset.')
    elif arguments['confirm']:
        confirm_remove()
        color.print_ok('confirm remove.')
def pretty_print_config(version_infos):
    """Pretty-print each configured version entry between rulers.

    For every entry the ``tag``, ``version``, ``bit`` and ``home``
    fields are printed; an empty input prints only the opening ruler
    plus a warning.

    :param version_infos: iterable of dicts with the four keys above
    """
    color.print_info('-' * 80)
    for info in version_infos:
        color.print_info()
        for field in ('tag', 'version', 'bit', 'home'):
            color.print_info('{}: {}'.format(field, info[field]))
        color.print_info()
        color.print_info('-' * 80)
    if not version_infos:
        color.print_warn('empty')
def check_db_pool(self):
    """Re-open the proxy database and purge every dead proxy.

    Probes each (IP, PORT) row in PROXY with ``self.isAlive`` and
    deletes the rows whose proxy no longer responds, then commits and
    closes the connection.

    NOTE(review): closing ``conn`` also closes ``self.conn`` (same
    object), leaving the instance with a closed connection — kept
    as-is since it matches the original behavior; confirm callers
    reconnect afterwards.
    """
    # Re-open to get a fresh connection state.
    self.conn.close()
    self.conn = sqlite3.connect(self.dbname)
    conn = self.conn
    query_cmd = '''
    select IP,PORT,REGION from PROXY;
    '''
    # Bug fix: fetch all rows up front — the original deleted rows
    # while iterating a live cursor over the same table, which can
    # skip rows in sqlite.
    rows = conn.execute(query_cmd).fetchall()
    for ip, port, _region in rows:
        if not self.isAlive(ip, port):
            # Proxy is dead -- remove it from the pool.
            color.print_warn("delete IP %s in db" % ip)
            # Bug fix: parameterized query instead of '%'-formatted
            # SQL (injection / quoting hazard on the IP value).
            conn.execute("delete from PROXY where IP=?", (ip,))
    conn.commit()
    conn.close()
def check_db_pool(self):
    """Re-open the proxy database and purge every dead proxy.

    Probes each (IP, PORT) row in PROXY with ``self.isAlive`` and
    deletes the rows whose proxy no longer responds, then commits and
    closes the connection.

    NOTE(review): closing ``conn`` also closes ``self.conn`` (same
    object), leaving the instance with a closed connection — kept
    as-is since it matches the original behavior; confirm callers
    reconnect afterwards.
    """
    # Re-open to get a fresh connection state.
    self.conn.close()
    self.conn = sqlite3.connect(self.dbname)
    conn = self.conn
    query_cmd = '''
    select IP,PORT,REGION from PROXY;
    '''
    # Bug fix: fetch all rows up front — the original deleted rows
    # while iterating a live cursor over the same table, which can
    # skip rows in sqlite.
    rows = conn.execute(query_cmd).fetchall()
    for ip, port, _region in rows:
        if not self.isAlive(ip, port):
            # Proxy is dead -- remove it from the pool.
            color.print_warn("delete IP %s in db" % ip)
            # Bug fix: parameterized query instead of '%'-formatted
            # SQL (injection / quoting hazard on the IP value).
            conn.execute("delete from PROXY where IP=?", (ip,))
    conn.commit()
    conn.close()
def write_activate_file(virtual_env, java_home, java_tag, force=False):
    """Write the activation script(s) for a java virtualenv.

    Creates ``<virtual_env>/bin/activate`` (plus ``deactivate.bat`` on
    Windows) pointing at *java_home* / *java_tag*. An existing project
    directory is left untouched unless *force* is set.

    :param virtual_env: project directory (made absolute)
    :param java_home: JDK home written into the activate script
    :param java_tag: version tag written into the activate script
    :param force: overwrite an existing project directory
    """
    virtual_env = os.path.abspath(virtual_env)
    if os.path.lexists(virtual_env) and not force:
        color.print_warn('project diretory %s exists already\n'
                         'you can use -f argument to continue.'
                         % virtual_env)
        return

    ensure_dir_exists(virtual_env)
    activate_dir = os.path.join(virtual_env, 'bin')
    ensure_dir_exists(os.path.join(activate_dir))
    if iswin():
        activate_path = os.path.join(activate_dir, 'activate.bat')
        deactivate_path = os.path.join(activate_dir, 'deactivate.bat')
        with open(activate_path, 'w') as f_act, \
                open(deactivate_path, 'w') as f_deact:
            f_act.write(create_activate_s(virtual_env, java_home, java_tag))
            # The deactivate script is a fixed template on Windows.
            from jvirtualenv.template.deactivate_template_bat import template
            f_deact.write(template)
        color.print_info('create active file {0}'.format(activate_path))
        color.print_info('run "{0}" to activate it'.format(activate_path))
        color.print_info('run "{0}" to deactivate it'.format(deactivate_path))
    else:
        activate_path = os.path.join(activate_dir, 'activate')
        with open(activate_path, 'w') as f_act:
            f_act.write(create_activate_s(virtual_env, java_home, java_tag))
        color.print_info('create active file {0}'.format(activate_path))
        color.print_info('run `source {0}` to activate it'.format(
            path_to(os.curdir, activate_path)))
def cli():
    """Entry point of the text-file tool.

    Sub-commands (docopt):
      * ``charset`` -- guess and print the encoding of the first file.
      * ``convert`` -- re-encode a file to ``<to_charset>`` (in place,
        via a temp file, unless ``--output`` is given).
      * ``merge``   -- concatenate files (by name or ``--regex``) into
        ``--output``, newline-separated.
    """
    arguments = docopt(__doc__, version=minghu6.__version__)
    path_list = arguments['<filename>']
    try:
        fr_list = []
        # NOTE(review): files already opened before the failing one
        # are never closed when FileNotFoundError fires here.
        [fr_list.append(open(path, 'rb')) for path in path_list]
    except FileNotFoundError:
        color.print_err('%s not found' % path_list)
        return
    else:
        if arguments['charset']:
            # Guess the encoding of the first file and report it with
            # chardet's confidence.
            fr = fr_list[0]
            result = fileecho.guess_charset(fr)
            encoding, confidence = result['encoding'], result['confidence']
            if encoding is None:
                color.print_err('unknown')
            else:
                color.print_info('{0}, {1:.2f}'.format(encoding, confidence))
            fr.close()
        elif arguments['convert']:
            fr = fr_list[0]
            path = path_list[0]
            to_charset = arguments['<to_charset>']
            from_charset = arguments['--from_charset']
            if from_charset is None:
                # No source charset given: fall back to detection, but
                # refuse to proceed on an unknown or low-confidence guess.
                result = fileecho.guess_charset(fr)
                encoding, confidence = (result['encoding'],
                                        result['confidence'])
                if confidence is None:
                    color.print_err('unknown from_charset, '
                                    'you must point it explicity')
                    return
                elif confidence < 0.7:
                    color.print_warn('uncertained from_charset, '
                                     'maybe %s\n'
                                     'you must point it explicity'
                                     % encoding)
                    return
                else:
                    from_charset = encoding
            # rename(name_old, name_new)
            # name_a, name_b must same driver in windows
            # Write the converted bytes to a temp file in the same
            # directory, then copy it over the destination.
            dir = os.path.dirname(os.path.abspath(path))
            fwn = tempfile.mktemp(dir=dir)
            with open(fwn, 'wb') as fw:
                for line in fr:
                    fw.write(line.decode(from_charset, errors='ignore')
                             .encode(to_charset, errors='ignore'))
            fr.close()
            if arguments['--output'] is None:
                # In-place conversion: overwrite the original file.
                shutil.copy(fwn, path)
            else:
                shutil.copy(fwn, arguments['--output'])
            os.remove(fwn)
        elif arguments['merge']:
            if arguments['--regex'] is not None:
                # color.print_info(arguments)
                merge_file_path_list = findlist(startdir=os.curdir,
                                                pattern=arguments['--regex'],
                                                regex_match=True,
                                                dosort=True)
            else:
                merge_file_path_list = arguments['<filename>']
            # Concatenate every input into --output, separating files
            # with a single newline byte.
            with open(arguments['--output'], 'wb') as outfile:
                for infile_path in merge_file_path_list:
                    with open(infile_path, 'rb') as infile:
                        outfile.write(infile.read())
                        outfile.write(b'\n')
                        color.print_ok('have merged file %s' % infile_path)
def merge(pattern_list, output, type, **other_kwargs):
    """Merge media files with ffmpeg.

    Depending on *type*, collects input files by pattern/prefix in the
    current directory (or takes *pattern_list* literally), sorts them,
    asks for confirmation, optionally normalizes resolution/fps for
    videos, then builds and runs the appropriate ffmpeg command into
    *output*.

    :param pattern_list: glob patterns / prefixes, or literal paths
    :param output: destination filename (extension is validated)
    :param type: merge mode; values seen here: 'vedio', 'audio', 'gif',
                 'video', 'va', 'vs' -- NOTE(review): 'vedio' (file
                 collection) vs 'video' (info check and concat command)
                 look like an inconsistent spelling; a 'vedio' input
                 would collect files but never set ``merge_cmd``,
                 raising UnboundLocalError below -- confirm intended
                 spelling against the CLI definition.
    :param other_kwargs: ``isprefix`` (prefix matching), ``framerate``
                 (required for type 'gif')
    """
    isprefix = other_kwargs.get('isprefix', False)
    if not assert_output_has_ext(output):
        color.print_err('Failed.')
        return
    base_dir = os.curdir
    merge_file_list = []
    merge_file_list2 = []
    if type in ('vedio', 'audio', 'gif'):
        # Collect candidate files from the current directory, skipping
        # sub-directories and the path2uuid bookkeeping database.
        for fn in os.listdir(base_dir):
            if os.path.isdir(fn):
                continue
            if fn == '.path2uuid.sqlite3':
                continue
            for pattern in pattern_list:
                if isprefix:
                    if fn.lower().startswith(pattern.lower()):
                        merge_file_list.append(fn)
                else:
                    if fnmatch.fnmatch(fn, pattern):
                        merge_file_list.append(fn)
    else:  # 'va', 'vs
        # Remaining modes take the given paths literally.
        merge_file_list = pattern_list
    # common_prefix_pattern = r'^(\w)+\+$'
    if isprefix and len(pattern_list) == 1:
        # Sort by the version-like suffix that follows the common prefix.
        def key(fn):
            base = os.path.splitext(os.path.basename(fn))[0]
            v = LooseVersion(base.split(pattern_list[0])[1])
            return v
    elif type in ('va', 'vs'):
        key = lambda x: 0  # keep the given order
    else:
        key = lambda fn: fn  # plain lexicographic order
    merge_file_list = sorted(merge_file_list, key=key)
    color.print_info('The following file will be merged in order')
    for i, file_to_merge in enumerate(merge_file_list):
        color.print_info('%3d. %s' % (i, file_to_merge))
    if len(merge_file_list) <= 1:
        color.print_info('Do nothing.')
        return
    args = input('press enter to continue, q to quit')
    if args in ('q', 'Q'):
        return
    # Map every input to its path2uuid alias (safe temp names).
    merge_file_tmp_list = list(map(lambda x: path2uuid(x, quiet=True),
                                   merge_file_list))
    merge_file_tmp_list2 = []
    if type == 'video':
        # check if the video can be merge
        FileInfo = namedtuple('FileInfo', ['width', 'height', 'fps'])
        merge_file_info_list = []
        for fn in merge_file_tmp_list:
            json_obj = load_video_info_json(fn)
            video_site, audio_site = get_video_audio_info_site_injson(
                json_obj)
            codec_name = json_obj['streams'][video_site]['codec_name']
            width = int(json_obj['streams'][video_site]['width'])
            height = int(json_obj['streams'][video_site]['height'])
            fps = round(load_fps_from_json(json_obj), 3)
            merge_file_info_list.append(FileInfo(width, height, fps))
        if not each_same(merge_file_info_list,
                         key=lambda x: (x.width, x.height, x.fps)):
            # Inputs disagree: offer to re-encode everything to the
            # smallest resolution/fps before concatenating.
            color.print_err('width, height, fps should be same of all video')
            min_width = sorted(merge_file_info_list,
                               key=lambda x: x.width)[0].width
            min_height = sorted(merge_file_info_list,
                                key=lambda x: x.height)[0].height
            min_resolution = '%dx%d' % (min_width, min_height)
            min_fps = sorted(merge_file_info_list,
                             key=lambda x: x.fps)[0].fps
            color.print_warn('all_to_resolution: %s' % min_resolution)
            color.print_warn('all_to_fps: %s' % min_fps)
            if askyesno('convert to fix?'):
                merge_file_tmp_list2 = list(map(
                    lambda x: add_postfix(x, 'tmp'), merge_file_tmp_list))

                def tmp(fn_tuple):
                    convert(*fn_tuple, size=min_resolution, fps=min_fps)
                list(map(lambda x: tmp(x),
                         zip(merge_file_tmp_list, merge_file_tmp_list2)))
            else:
                return
    elif type == 'audio':
        pass
    elif type == 'va':
        pass
    elif type == 'gif':
        pass
    output_tmp = path2uuid(output, rename=False, quiet=True)
    if len(merge_file_tmp_list2) == 0:
        input_file_list = merge_file_tmp_list
    else:
        input_file_list = merge_file_tmp_list2  # only for merge video
    try:
        # ffmpeg's concat demuxer reads its inputs from a list file.
        fw = open('.mylist', 'w')
        for fn in input_file_list:
            fw.write("file '%s' \n" % fn)
        fw.close()
        if type in ('video', 'audio'):
            merge_cmd = 'ffmpeg -f concat -i %s -c copy %s' % ('.mylist',
                                                               output_tmp)
        elif type == 'va':
            # Mux one video stream with one audio stream, no re-encode.
            merge_cmd = 'ffmpeg -i %s -i %s -vcodec copy -acodec copy %s ' \
                        % (input_file_list[0], input_file_list[1],
                           output_tmp)
        elif type == 'vs':
            # Burn a subtitle file into the video; the subtitle must be
            # utf-8/ascii, otherwise try converting it first.
            with open(input_file_list[1]) as f_subtitle:
                encoding = guess_charset(f_subtitle)['encoding']
            if encoding.lower() not in ('utf-8', 'ascii'):
                info, err = exec_cmd(
                    '%s -m minghu6.tools.text convert %s utf-8'
                    % (sys.executable, input_file_list[1]))
                if len(err) > 1 or err[0] != '':  # exec failed
                    # NOTE(review): this format string has a %s but no
                    # argument -- the message prints literally; confirm
                    # the intended file name interpolation.
                    color.print_err(
                        'error codec of the subtitle %s (need utf-8)')
            merge_cmd = 'ffmpeg -i %s -vf subtitles=%s %s' \
                        % (input_file_list[0], input_file_list[1],
                           output_tmp)
        elif type == 'gif':
            framerate = other_kwargs['framerate']
            merge_cmd = 'ffmpeg -f image2 -framerate %d -i %s %s' \
                        % (int(framerate), '.mylist', output_tmp)
        # NOTE(review): for any other *type* value, merge_cmd is unbound
        # here and this line raises UnboundLocalError.
        for line in CommandRunner.run(merge_cmd):
            print(line)
        path2uuid(output_tmp, d=True)
    except Exception:
        raise
    else:
        color.print_ok('Done.')
    finally:
        # Best-effort cleanup of the list file and the uuid aliases.
        try:
            os.remove('.mylist')
        except:
            pass
        for fn in input_file_list:
            path2uuid(fn, d=True)
def cli():
    """Entry point of the text-file tool.

    Sub-commands (docopt):
      * ``charset`` -- guess and print the encoding of the first file.
      * ``convert`` -- re-encode the first file into ``--output``.
      * ``merge``   -- concatenate files (by name or ``--regex``) into
        ``--output``, newline-separated.
    """
    arguments = docopt(__doc__, version=minghu6.__version__)
    path_list = arguments['<filename>']
    try:
        fr_list = []
        # NOTE(review): files already opened before the failing one
        # are never closed when FileNotFoundError fires here.
        [fr_list.append(open(path, 'rb')) for path in path_list]
    except FileNotFoundError:
        color.print_err('%s not found' % path_list)
        return
    else:
        if arguments['charset']:
            # Guess the encoding of the first file and report it with
            # chardet's confidence.
            fr = fr_list[0]
            result = fileecho.guess_charset(fr)
            encoding, confidence = result['encoding'], result['confidence']
            if encoding is None:
                color.print_err('unknown')
            else:
                color.print_info('{0}, {1:.2f}'.format(encoding, confidence))
            fr.close()
        elif arguments['convert']:
            fr = fr_list[0]
            output = os.path.abspath(arguments['--output'])
            to_charset = arguments['<to_charset>']
            from_charset = arguments['--from_charset']
            if from_charset is None:
                # No source charset given: fall back to detection, but
                # refuse to proceed on an unknown or low-confidence guess.
                result = fileecho.guess_charset(fr)
                encoding, confidence = (result['encoding'],
                                        result['confidence'])
                if confidence is None:
                    color.print_err('unknown from_charset, '
                                    'you must point it explicity')
                    return
                elif confidence < 0.7:
                    color.print_warn('uncertained from_charset, '
                                     'maybe %s\n'
                                     'you must point it explicity'
                                     % encoding)
                    return
                else:
                    from_charset = encoding
            # rename(name_old, name_new)
            # name_a, name_b must same driver in windows
            with open(output, 'wb') as fw:
                for line in fr:
                    # print(line.decode(from_charset))
                    # NOTE(review): this decode(from)->encode(from)->
                    # decode(to)->encode(from) round trip looks
                    # suspicious -- the written bytes end up in
                    # from_charset, not to_charset; confirm intent.
                    print(line.decode(from_charset)
                          .encode(from_charset, errors='ignore')
                          .decode(to_charset, errors='ignore'))
                    fw.write(line.decode(from_charset)
                             .encode(from_charset, errors='ignore')
                             .decode(to_charset, errors='ignore')
                             .encode(from_charset))
            fr.close()
        elif arguments['merge']:
            if arguments['--regex'] is not None:
                print(arguments['--regex'])
                # color.print_info(arguments)
                merge_file_path_list = findlist(startdir=os.curdir,
                                                pattern=arguments['--regex'],
                                                regex_match=True,
                                                dosort=True)
                color.print_normal('merge file:')
                pprint(merge_file_path_list)
            else:
                merge_file_path_list = arguments['<filename>']
            # Concatenate every input into --output, separating files
            # with a single newline byte.
            with open(arguments['--output'], 'wb') as outfile:
                for infile_path in merge_file_path_list:
                    with open(infile_path, 'rb') as infile:
                        outfile.write(infile.read())
                        outfile.write(b'\n')
                        color.print_ok('have merged file %s' % infile_path)