def do_actions():
    arr = sys.argv[1:]
    if(arr[0] == '-help'):
        s = '''
        #read from params, -login must be the first
        python3 mw316r.py -login -ip "192.168.1.1" -pw "lidapeng"
        #read from config.cfg
        python3 mw316r.py -login
        python3 mw316r.py -status
        python3 mw316r.py -status wan
        python3 mw316r.py -status lan
        python3 mw316r.py -status wlan
        python3 mw316r.py -status general
        python3 mw316r.py -status wan_stats
        python3 mw316r.py -status wlan wan lan
        '''
        print(s)
    elif(arr[0] == '-login'):
        ckstr = login()
        nvft.write_to_file(fn="ckstr.record", op="w+", content=ckstr)
    elif(arr[0] == '-status'):
        ckstr = load_ckstr()
        params = arr[1:]
        lngth = params.__len__()
        if(lngth == 0):
            d = get_status(ckstr)
            pobj(d)
        else:
            params = elel.array_map(params, lambda ele: ACTIONS['status'][ele])
            d = get_status(ckstr)
            for key in params:
                pobj(d[key])
    elif(arr[0] == '-connect'):
        pass
def load_ckstr():
    try:
        ckstr = nvft.read_file_content(fn="ckstr.record", op="r+")
    except:
        ckstr = login()
        nvft.write_to_file(fn="ckstr.record", op="w+", content=ckstr)
    else:
        pass
    return(ckstr)
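# Hedged usage sketch (not part of the original source): load_ckstr() returns the cookie
# string cached in ckstr.record, logging in and writing the cache on the first run. The
# helper below is hypothetical and only shows how it pairs with get_status()/pobj() from
# do_actions(); the 'wan' key is looked up through ACTIONS exactly as do_actions() does.
def demo_print_wan_status():
    ckstr = load_ckstr()
    d = get_status(ckstr)
    pobj(d[ACTIONS['status']['wan']])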
def get_all_pages(ckstr, base=BASE):
    ics = []
    cmds = ""
    for key in URLS:
        fn = key + ".html"
        url = URLS[key]
        ic = get_page(ckstr, url, base)
        nvft.write_to_file(fn=fn, op="wb+", content=ic["resp_body_bytes"])
        cmds = cmds + pscp_cmd(fn) + '\nroot\r\n'
        ics.append(ic)
    print(cmds)
    return(ics)
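# Hedged sketch (not in the original excerpt): mw316r.py appears to be meant to run as a
# script, with do_actions() dispatching on sys.argv as documented in its -help text. The
# __main__ guard below is an assumption about how the module is wired up.
if __name__ == '__main__':
    do_actions()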
def get_country_island_dict(root, database_parent_dir='../', display=0):
    d = {}
    eles_country = root.xpath('//select[@name="Country"]/option')
    for i in range(1, eles_country.__len__()):
        code = eles_country[i].get('value')
        country = eles_country[i].text
        d[code] = country
        d[country] = code
    exact_dir = ''.join((database_parent_dir, 'INFOS/'))
    if (os.path.exists(exact_dir)):
        pass
    else:
        os.makedirs(exact_dir)
    exact_dir = ''.join((database_parent_dir, 'INFOS/country.dict'))
    nvft.write_to_file(fn=exact_dir, content=json.dumps(d), op='w+')
    exact_dir = ''.join((database_parent_dir, 'INFOS/country.info'))
    info = get_printed_str(d, with_color=0, display=display)
    nvft.write_to_file(fn=exact_dir, content=info, op='w+')
    return (d)
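# Hedged usage sketch (not part of the original source): get_country_island_dict() stores
# both directions in one dict (option value -> country name and country name -> option
# value), so callers can translate either way. 'Japan' is only an illustrative name and
# demo_country_lookup is a hypothetical helper.
def demo_country_lookup(root):
    country_island_dict = get_country_island_dict(root, database_parent_dir='../')
    code = country_island_dict['Japan']        # name -> code (option value string)
    name = country_island_dict[str(code)]      # code -> name, mirroring get_country_infos()
    return (code, name)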
def get_country_infos(c_code, country_island_dict, info_container, records_container, **kwargs):
    if ('display' in kwargs):
        display = int(kwargs['display'])
    else:
        display = 0
    if ('new_database' in kwargs):
        newdb = kwargs['new_database']
    else:
        newdb = 0
    ####
    #os.system('date')
    ####
    root = search_via_country(c_code, info_container, records_container)
    tables = get_all_tables(root, info_container['base_url'])
    qurl = get_query_url(info_container, kwargs)
    info_container['url'] = qurl
    info_container['method'] = 'GET'
    info_container['req_body'] = None
    info_container = nvsoli.walkon(info_container, records_container=records_container)
    html_text = info_container['resp_body_bytes'].decode('utf-8')
    root = etree.HTML(html_text)
    eles = root.xpath('//thead/tr/th')
    ####
    ####
    fish = {}
    for i in range(0, eles.__len__()):
        fish[eles[i].text] = None
    url_dict = nvurl.url_to_dict(info_container['url'])
    qd = nvurl.urldecode(url_dict['query'])
    all_country_eles = root.xpath("//tr[@class='t_value1']")
    country = country_island_dict[str(c_code)]
    fn = '../INFOS/' + 'COUNTRYANDISLAND/' + country + '/' + qd['cpresence'] + '/' + qd['vhabitat'] + '/'
    if (os.path.exists(fn)):
        pass
    else:
        os.makedirs(fn)
    picfn = '../PICS/' + 'COUNTRYANDISLAND/' + country + '/' + qd['cpresence'] + '/' + qd['vhabitat'] + '/'
    if (os.path.exists(picfn)):
        pass
    else:
        os.makedirs(picfn)
    thumbfn = '../THUMBNAILS/' + 'COUNTRYANDISLAND/' + country + '/' + qd['cpresence'] + '/' + qd['vhabitat'] + '/'
    if (os.path.exists(thumbfn)):
        pass
    else:
        os.makedirs(thumbfn)
    ####
    ####
    ####
    if (bool(newdb)):
        fishes = {}
    else:
        fishes_dir = fn + "fishes.dict"
        print(fishes_dir)
        if (os.path.exists(fishes_dir)):
            fd = open(fishes_dir, 'r+')
            fishes_text = fd.read()
            fishes = json.loads(fishes_text)
            fd.close()
        else:
            fishes = {}
    ####
    print('--------------------')
    from xdict.jprint import paint_str
    print(paint_str("===============fishes load completed======================", single_color='yellow'))
    print(fishes.keys())
    print('----------------')
    #os.system('date')
    ####
    for i in range(0, all_country_eles.__len__()):
        fish_ele = all_country_eles[i]
        nfish = get_fish_info(fishes, fish_ele, fish, info_container, records_container)
        ######
        print(paint_str("===============nfish load completed======================", single_color='green'))
        ######
        if (nfish):
            ####
            print("====handle new nfish========")
            ####
            nfish['eles-seq'] = i
            nfish['images-dir'] = picfn
            nfish['info-dir'] = fn
            fishes[nfish['Species']['name']] = nfish
            nfdir = fn + nfish['Species']['name'] + '/'
            if (os.path.exists(nfdir)):
                pass
            else:
                os.makedirs(nfdir)
            nffn = nfdir + 'fish.dict'
            infofn = nfdir + 'fish.info'
            nvft.write_to_file(fn=nffn, content=json.dumps(nfish), op='w+')
            info = get_printed_str(nfish, with_color=0, display=display)
            nvft.write_to_file(fn=infofn, content=info, op='w+')
        else:
            ####
            print("===bypass existing fish====")
            ####
            pass
    #---------------------------------------#
    ####
    print(paint_str("===============all nfishes load completed======================", single_color='yellow'))
    print(fishes.keys())
    print(fishes.keys().__len__())
    ####
    dfn = fn + 'fishes.dict'
    if (os.path.exists(dfn)):
        pass
    else:
        nvft.write_to_file(fn=dfn, content=json.dumps(fishes), op='w+')
    ldfn = fn + 'fishes.lines'
    if (os.path.exists(ldfn)):
        pass
    else:
        nvft.write_to_file(fn=ldfn, content='', op='w+')
        for key in fishes:
            nfish = fishes[key]
            nvft.write_to_file(fn=ldfn, content=get_printed_str(nfish, with_color=0, display=display), op='a+')
            nvft.write_to_file(fn=ldfn, content='\n', op='a+')
    #---------------------------------------#
    ####
    print("-----get all_photos ready----")
    ####
    apafn = fn + 'pics.array'
    if (os.path.exists(apafn)):
        fd = open(apafn, 'r+')
        apa_text = fd.read()
        all_photos = json.loads(apa_text)
        fd.close()
    else:
        all_photos = []
        for name in fishes:
            #all_photos = all_photos + copy.deepcopy(fishes[name]['All-Photos'])
            for photo in fishes[name]['All-Photos']:
                all_photos.append(photo)
    ####
    print("all_photos ready")
    print(all_photos.__len__())
    ####
    types = []
    for each in all_photos:
        type = each['type']
        if (type in types):
            pass
        else:
            if (type == None):
                pass
            else:
                types.append(type)
    for type in types:
        typefn = picfn + type
        if (os.path.exists(typefn)):
            pass
        else:
            os.makedirs(typefn)
        typefn = thumbfn + type
        if (os.path.exists(typefn)):
            pass
        else:
            os.makedirs(typefn)
    for each in all_photos:
        if (each['type'] == None):
            each['img-dir'] = None
            each['thumb-dir'] = None
        else:
            img_dir = picfn + each['type'] + '/' + each['img-name']
            each['img-dir'] = img_dir
            thumb_dir = thumbfn + each['type'] + '/' + each['img-name']
            each['thumb-dir'] = thumb_dir
    apafn = fn + 'pics.array'
    if (os.path.exists(apafn)):
        pass
    else:
        nvft.write_to_file(fn=apafn, content=json.dumps(all_photos), op='w+')
    lapafn = fn + 'pics.lines'
    if (os.path.exists(lapafn)):
        pass
    else:
        nvft.write_to_file(fn=lapafn, content='', op='w+')
        for each in all_photos:
            nvft.write_to_file(fn=lapafn, content=get_printed_str(each, with_color=0, display=display), op='a+')
            nvft.write_to_file(fn=lapafn, content='\n', op='a+')
    ############################
    print("pics.lines and pics.array ready")
    ############################
    imagename_dir_dict = {}
    dir_imagename_dict = {}
    for each in all_photos:
        if (each['type'] != None):
            imagename = each['img-name']
            dir = each['img-dir']
        else:
            imagename = None
            dir = None
        imagename_dir_dict[imagename] = dir
        dir_imagename_dict[dir] = imagename
    iddfn = fn + 'image_dir.dict'
    didfn = fn + 'dir_image.dict'
    if (os.path.exists(iddfn)):
        pass
    else:
        nvft.write_to_file(fn=iddfn, content=json.dumps(imagename_dir_dict), op='w+')
    liddfn = fn + 'image_dir.lines'
    if (os.path.exists(liddfn)):
        pass
    else:
        nvft.write_to_file(fn=liddfn, content='', op='w+')
        for each in imagename_dir_dict:
            nvft.write_to_file(fn=liddfn, content=get_printed_str(each, with_color=0, display=display), op='a+')
            nvft.write_to_file(fn=liddfn, content='\n', op='a+')
    if (os.path.exists(didfn)):
        pass
    else:
        nvft.write_to_file(fn=didfn, content=json.dumps(dir_imagename_dict), op='w+')
    ldidfn = fn + 'dir_image.lines'
    if (os.path.exists(ldidfn)):
        pass
    else:
        nvft.write_to_file(fn=ldidfn, content=get_printed_str(dir_imagename_dict, with_color=0, display=display), op='w+')
    ###############
    print("==dir_image.dict and dir_image.lines ready==")
    ##############
    thumb_dir_dict = {}
    dir_thumb_dict = {}
    for each in all_photos:
        if (each['type'] != None):
            imagename = each['img-name']
            dir = each['thumb-dir']
        else:
            imagename = None
            dir = None
        thumb_dir_dict[imagename] = dir
        dir_thumb_dict[dir] = imagename
    iddfn = fn + 'thumb_dir.dict'
    didfn = fn + 'dir_thumb.dict'
    if (os.path.exists(iddfn)):
        pass
    else:
        nvft.write_to_file(fn=iddfn, content=json.dumps(thumb_dir_dict), op='w+')
    liddfn = fn + 'thumb_dir.lines'
    if (os.path.exists(liddfn)):
        pass
    else:
        nvft.write_to_file(fn=liddfn, content='', op='w+')
        for each in thumb_dir_dict:
            nvft.write_to_file(fn=liddfn, content=get_printed_str(each, with_color=0, display=display), op='a+')
            nvft.write_to_file(fn=liddfn, content='\n', op='a+')
    if (os.path.exists(didfn)):
        pass
    else:
        nvft.write_to_file(fn=didfn, content=json.dumps(dir_thumb_dict), op='w+')
    ldidfn = fn + 'dir_thumb.lines'
    if (os.path.exists(ldidfn)):
        pass
    else:
        nvft.write_to_file(fn=ldidfn, content=get_printed_str(dir_thumb_dict, with_color=0, display=display), op='w+')
    ###############
    print("===dir_thumb.lines and thumb_dir.dict ready===")
    ###############
    print("begin download images")
    ###############
    for each in all_photos:
        if (each['type'] != None):
            imagename = each['img-name']
            img_dir = each['img-dir']
            img_url = each['img-url']
            thumb_dir = each['thumb-dir']
            thumb_url = each['thumbnail-url']
            if (os.path.exists(img_dir)):
                ####
                print(paint_str("pass_by_pic", single_color="red"))
                ####
                pass
            else:
                info_container['url'] = img_url
                info_container = nvsoli.walkon(info_container, records_container=records_container)
                info_container = nvsoli.auto_redireced(info_container, records_container)
                nvft.write_to_file(fn=img_dir, content=info_container['resp_body_bytes'], op='wb+')
                ####
                print("downloaded one pic")
                ####
            if (os.path.exists(thumb_dir)):
                ####
                print(paint_str("pass_by_thumb", single_color="red"))
                ####
                pass
            else:
                info_container['url'] = thumb_url
                info_container = nvsoli.walkon(info_container, records_container=records_container)
                info_container = nvsoli.auto_redireced(info_container, records_container)
                nvft.write_to_file(fn=thumb_dir, content=info_container['resp_body_bytes'], op='wb+')
                ####
                print("downloaded one thumb")
                ####
        else:
            print("---external pics not downloaded in this version, pass--")
            pass
    return ((info_container, records_container))
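# Hedged driver sketch (not part of the original source): one plausible way to tie the
# functions above together for a single country. It assumes info_container already holds
# the search page whose <select name="Country"> options feed get_country_island_dict(),
# and that info_container/records_container were prepared elsewhere (e.g. via nvsoli).
# demo_scrape_one_country is a hypothetical name; display/new_database mirror the kwargs
# read at the top of get_country_infos().
def demo_scrape_one_country(c_code, info_container, records_container):
    root = etree.HTML(info_container['resp_body_bytes'].decode('utf-8'))
    country_island_dict = get_country_island_dict(root, database_parent_dir='../')
    return get_country_infos(c_code,
                             country_island_dict,
                             info_container,
                             records_container,
                             display=0,
                             new_database=0)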
    kstart = 0
    xstart = 0
    ystart = 0
else:
    istart = json.loads(content)['istart']
    jstart = json.loads(content)['jstart']
    kstart = json.loads(content)['kstart']
    xstart = json.loads(content)['xstart']
    ystart = json.loads(content)['ystart']
try:
    content_curls = nvft.read_file_content(fn='../curls.dict', op='r')
    content_cnames = nvft.read_file_content(fn='../cnames.dict', op='r')
except:
    curls, cnames = get_country_urls(locs_url)
    nvft.write_to_file(fn='../curls.dict', content=json.dumps(curls), op='w+')
    nvft.write_to_file(fn='../cnames.dict', content=json.dumps(cnames), op='w+')
else:
    curls = json.loads(content_curls)
    cnames = json.loads(content_cnames)
try:
    content_country_md = nvft.read_file_content(fn='../country.dict', op='r')
except:
    country_md = creat_country_md(curls, cnames)
    nvft.write_to_file(fn='../country.dict', content=json.dumps(country_md), op='w+')
else:
    country_md = json.loads(content_country_md)
def get_species(root):
    eles_sps = root.xpath(
        '//tr/td/span/a | //tr/td/em/strong/a | //tr/td/a | //tr/td/strong/a | //tr/td/em/a | //tr/td/a | //tr/td/strong/em/a'
    )
    new_eles_sps = []
    for i in range(0, eles_sps.__len__()):
        if (('#' in eles_sps[i].attrib['href']) | ('strombidae' in eles_sps[i].attrib['href']) | ('images' in eles_sps[i].attrib['href'])):
            new_eles_sps.append(eles_sps[i])
        else:
            pass
    del new_eles_sps[-1]
    #####################
    ele_cnames = []
    for i in range(0, new_eles_sps.__len__()):
        td_parent = new_eles_sps[i].getparent()
        while (td_parent.tag != 'td'):
            td_parent = td_parent.getparent()
        td_next = td_parent.getnext()
        ele_cnames.append(td_next)
    #####################
    urls = []
    for i in range(0, new_eles_sps.__len__()):
        urls.append(ryan_base_url + new_eles_sps[i].attrib['href'])
    #####################
    dir_names = []
    for i in range(0, new_eles_sps.__len__()):
        dir_names.append(new_eles_sps[i].attrib['href'].replace('.htm', '').replace('#', ' '))
    ####################
    new_urls_set = set({})
    for i in range(0, urls.__len__()):
        url = urls[i]
        url = url.split('#')[0]
        new_urls_set.add(url)
    ####################
    image_urls = []
    for url in new_urls_set:
        info_container['url'] = url
        info_container = nvsoli.walkon(info_container, records_container=records_container)
        root = get_etree_root(info_container)
        eles = root.xpath('//tr/td/div/img')
        for j in range(0, eles.__len__()):
            image_urls.append((ryan_base_url + eles[j].attrib['src']).replace(' ', '%20'))
    #####################
    mirror_indexes = {}
    image_names = []
    info_names = []
    infos = []
    for i in range(0, image_urls.__len__()):
        suffix = image_urls[i].split('.')[-1]
        arr = os.path.basename(image_urls[i]).split('%20')
        name = arr[0] + ' ' + arr[1].rstrip(',').rstrip('.').rstrip(' ') + '_'
        name = name + hashlib.sha1(image_urls[i].encode('utf-8')).hexdigest()
        name = name + '.' + suffix
        image_names.append(name)
        info_names.append(name + '.' + 'info')
        info = {}
        info['origin'] = image_urls[i]
        info['path'] = ''
        info['details'] = {}
        infos.append(info)
        mirror_indexes[name] = image_urls[i]
        mirror_indexes[image_urls[i]] = name
        info_container['url'] = image_urls[i]
        info_container = nvsoli.walkon(info_container, records_container=records_container)
        nvft.write_to_file(fn=photosdir + '/' + image_names[i], op='wb', content=info_container['resp_body_bytes'])
        nvft.write_to_file(fn=photosdir + '/' + 'indexes.dict', op='w', content=json.dumps(mirror_indexes))
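# Hedged usage sketch (not part of the original source): get_species() relies on
# module-level globals (ryan_base_url, photosdir, info_container, records_container), so
# a caller only needs to fetch an index page and pass in its parsed root. The function
# name demo_collect_species and the index_url parameter are hypothetical.
def demo_collect_species(index_url):
    global info_container
    info_container['url'] = index_url
    info_container = nvsoli.walkon(info_container, records_container=records_container)
    root = get_etree_root(info_container)
    get_species(root)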