def memory_free_size():
    """Return the amount of free memory in MB, or -1 on failure.

    On failure the reason is stored in SystemLocalInfo.error_info_str
    (None when the platform is simply unsupported).
    """
    system_name = SystemLocalInfo.system()
    if system_name == 'linux':
        s = Util.execute_and_output('free -m', True)
        if s is None:
            SystemLocalInfo.error_info_str = "checking for free -m command fails ... no"
            return -1
        # The free amount is the second number on the '-/+ buffers/cache' line;
        # look only at a short window after the marker to avoid other numbers.
        s_prefix = 'buffers/cache'
        s_code = s[s.find(s_prefix):s.find(s_prefix) + 50]
        size = re.findall(r"\d+", s_code)[1]
        return int(size)
    elif system_name == 'solaris':
        s = Util.execute_and_output('top | head -n 4', True)
        if s is None:
            SystemLocalInfo.error_info_str = "checking for top | head -n 4 command fails ... no"
            return -1
        size = re.findall(r"(\d+)M free mem", s)[0]
        return int(size)
    else:
        SystemLocalInfo.error_info_str = None
        return -1
def __do_pack(build_home, source, distribution, cmode, tag, force_update):
    """Build every source tree (from *tag* when given, otherwise each trunk)
    and assemble the distribution package.

    Returns True on success, False as soon as any build step fails.
    """
    if tag is not None:
        # A tag overrides the per-source trunks; tagged builds pack into 'app'.
        if not Pack.__build_source(build_home, tag, cmode, force_update,
                                   source[0]["binary_prefix"], 'app'):
            return False
    else:
        for entry in source:
            url = entry["trunk"]
            # Default the pack path to 'app' when the config omits it.
            pack_path = dict(entry).get("pack_path")
            if pack_path is None:
                pack_path = 'app'
            if not Pack.__build_source(build_home, url, cmode, force_update,
                                       entry["binary_prefix"], pack_path):
                return False
    Pack.__build_version(build_home, source, cmode, tag)
    # Drop the scratch build tree before packaging.
    Util.execute_and_output('rm -fr ' + build_home + '/.build')
    print('Del [.build] OK!')
    Pack.__make_package(build_home, distribution, cmode)
    return True
def extractField(self, aRawColumn):
    """Parse a raw 'name|(type)'-style column header into an MField.

    Returns None (the column is silently ignored) when the header does not
    match the expected naming style; raises when the declared type is not
    listed in Rules.dataTypes.
    """
    aRawColumn = aRawColumn.replace(" ", "")  # strip every space from the header
    field = None
    if Util.checkColumnNameStyle(aRawColumn):
        pipeSignIndex = aRawColumn.index('|')
        firstBracketIndex = aRawColumn.index('(')
        field = MField()
        # Name sits between '|' and '(' ; type between '(' and the trailing ')'.
        field.fieldName = aRawColumn[pipeSignIndex + 1:firstBracketIndex]
        field.fieldType = aRawColumn[firstBracketIndex + 1:-1]
        if not Util.getDictValue(Rules.dataTypes, field.fieldType, False):
            raise Exception("Field type is not valid...")
    # else: deliberately ignore badly-named columns instead of raising.
    return field
def remove_tree(self, item, cmode):
    """Recursively remove cached library copies for every leaf of the
    dependency tree whose name matches the compile mode *cmode*.

    Leaves already marked 'none' get a forced cleanup of any stale cache
    directory; other leaves are removed only if present.  Empty parent
    directories are pruned afterwards.
    """
    if (item == None):
        # No start item: walk the whole tree from the root.
        h_child_item = self.get_root_item()
    else:
        h_child_item = self.get_child_item(item)
    while(h_child_item != None):
        if (self.get_child_item(h_child_item) == None):
            # Leaf node: its text encodes a library artifact name.
            item_text = self.get_item_text(h_child_item)
            # Only leaves built for this compile mode (e.g. "_64_") are handled.
            if str(item_text).find("_" + cmode[0:2] + "_") != -1:
                full_svn_path = self.get_full_path(h_child_item, '|')[0]
                svn_root_path = full_svn_path[:str(full_svn_path).find('|')]
                cache_root_path = Glo.get_local_cache_path(svn_root_path, self.__repositories)
                if cache_root_path == None:
                    print svn_root_path + ' does not exist in .buildc.rc'
                    sys.exit(Errors.conf_item_not_found)
                # Map the '|'-separated svn path onto the local cache layout:
                # <cache_root>/<name>/<version>/<artifact>
                full_cache_path = cache_root_path + '|' + full_svn_path[str(full_svn_path).find('|')+1:]
                real_cache_path = str(full_cache_path).replace('|', '/')
                real_cache_version_path = real_cache_path[:str(real_cache_path).rfind('/')]
                real_cache_name_path = real_cache_version_path[:str(real_cache_version_path).rfind('/')]
                if h_child_item.data == 'none':
                    # Node already marked clean: wipe any leftover cache dir.
                    if os.path.exists(real_cache_path):
                        ret = Util.execute_and_return("rm -rf " + real_cache_path + "/.svn " + real_cache_path)
                        if (ret != 0):
                            print 'Remove [' + real_cache_path + '] Failed!'
                            sys.exit(ret)
                        else:
                            print 'Remove [' + real_cache_path + '] OK!'
                else:
                    if not os.path.exists(real_cache_path):
                        # Nothing cached; nothing to remove.
                        pass
                    else:
                        ret = Util.execute_and_return("rm -rf " + real_cache_path + "/.svn " + real_cache_path)
                        if (ret != 0):
                            print 'Remove [' + real_cache_path + '] Failed!'
                            sys.exit(ret)
                        else:
                            print 'Remove [' + real_cache_path + '] OK!'
                # Prune now-empty parent directories bottom-up.
                if os.path.exists(real_cache_version_path):
                    if len(os.listdir(real_cache_version_path)) == 0:
                        os.rmdir(real_cache_version_path)
                if os.path.exists(real_cache_name_path):
                    if len(os.listdir(real_cache_name_path)) == 0:
                        os.rmdir(real_cache_name_path)
                if os.path.exists(cache_root_path):
                    if len(os.listdir(cache_root_path)) == 0:
                        os.rmdir(cache_root_path)
                # Mark the node as cleaned.
                h_child_item.data = 'none'
        # Recurse into this child, then continue with its siblings.
        self.remove_tree(h_child_item, cmode)
        h_child_item = self.get_next_sibling_item(h_child_item)
def system_bit():
    """Report the native word size of the local OS (e.g. '32'/'64'), or 'unknown'."""
    os_name = SystemLocalInfo.system()
    if os_name == 'linux':
        return Util.execute_and_output('getconf LONG_BIT')
    if os_name == 'solaris':
        return Util.execute_and_output('isainfo -b')
    return 'unknown'
def system_version():
    """Best-effort OS version: RedHat release number on Linux, `uname -r` on Solaris."""
    os_name = SystemLocalInfo.system()
    if os_name == 'linux':
        release_text = Util.execute_and_output('cat /etc/redhat-release')
        # Inspect a short window after the word 'release' and pull the dotted number.
        marker = 'release'
        window = release_text[release_text.find(marker):release_text.find(marker)+50]
        return re.findall("((\d+\.)*\d+)", window)[0][0]
    if os_name == 'solaris':
        return Util.execute_and_output('uname -r')
    return 'unknown'
def update_tree(self, item, cmode, ignore_error):
    """Recursively sync every leaf of the dependency tree matching *cmode*
    with its SVN source: checkout missing libraries, update stale ones,
    and record the fresh revision code on each node.
    """
    if (item == None):
        # No start item: walk the whole tree from the root.
        h_child_item = self.get_root_item()
    else:
        h_child_item = self.get_child_item(item)
    while(h_child_item != None):
        if (self.get_child_item(h_child_item) == None):
            # Leaf node: its text encodes a library artifact name.
            item_text = self.get_item_text(h_child_item)
            # Only leaves built for this compile mode (e.g. "_64_") are handled.
            if str(item_text).find("_" + cmode[0:2] + "_") != -1:
                full_svn_path = self.get_full_path(h_child_item, '|')[0]
                real_svn_path = str(full_svn_path).replace('|', '/')
                svn_root_path = full_svn_path[:str(full_svn_path).find('|')]
                cache_root_path = Glo.get_local_cache_path(svn_root_path, self.__repositories)
                if cache_root_path == None:
                    print svn_root_path + ' does not exist in .buildc.rc'
                    sys.exit(Errors.conf_item_not_found)
                # Map the '|'-separated svn path onto the local cache layout:
                # <cache_root>/<name>/<version>/<artifact>
                full_cache_path = cache_root_path + '|' + full_svn_path[str(full_svn_path).find('|')+1:]
                real_cache_path = str(full_cache_path).replace('|', '/')
                real_cache_version_path = real_cache_path[:str(real_cache_path).rfind('/')]
                real_cache_name_path = real_cache_version_path[:str(real_cache_version_path).rfind('/')]
                dep_libname = real_cache_name_path[str(real_cache_name_path).rfind('/')+1:]
                dep_libversion = real_cache_version_path[str(real_cache_version_path).rfind('/')+1:]
                (trunk_user, trunk_passwd) = self.__get_user_and_passwd_by_url(svn_root_path)
                # Remote revision of the library in SVN.
                svn_revision_code = SvnLocalOper.get_svn_info_revision_code(real_svn_path, True, trunk_user, trunk_passwd)
                if h_child_item.data == 'none':
                    # Node marked clean: drop any stale cached copy.
                    if os.path.exists(real_cache_path):
                        Util.execute_and_return("rm -rf " + real_cache_path)
                else:
                    if not os.path.exists(real_cache_path):
                        # Library never cached: fresh checkout.
                        print 'library [' + dep_libname + ' ' + dep_libversion + '] does not exist!'
                        print 'Checkout [' + real_svn_path + ']...'
                        SvnLocalOper.checkout(real_svn_path, real_cache_path, ignore_error, trunk_user, trunk_passwd)
                        print 'Checkout [' + real_svn_path + '] OK!'
                    else:
                        # Cached copy exists: update only when revisions differ.
                        cache_revision_code = SvnLocalOper.get_svn_info_revision_code(real_cache_path, None)
                        if svn_revision_code != cache_revision_code:
                            print 'Update [' + dep_libname + ' ' + dep_libversion + ']...'
                            SvnLocalOper.update(real_cache_path, ignore_error, trunk_user, trunk_passwd)
                            print 'Update [' + dep_libname + ' ' + dep_libversion + '] OK!'
                # Remember the revision we are now synced to.
                h_child_item.data = svn_revision_code
        # Recurse into this child, then continue with its siblings.
        self.update_tree(h_child_item, cmode, ignore_error)
        h_child_item = self.get_next_sibling_item(h_child_item)
def cache_remove():
    """Wipe the local cache of every configured external repository and
    reset the persisted repository tree to empty nodes.
    """
    dotrc = Glo.dot_buildc_rc_path()
    if not os.path.exists(dotrc):
        print 'Can not found ' + dotrc
        print 'Please run buildc init and then config .buildc.rc!'
        sys.exit(Errors.conf_file_not_found)
    buildc_rc = Load.load_dot_buildc_rc(dotrc)
    for repository in buildc_rc.external_repositories:
        # repository[0] is the svn URL; map it onto its local cache dir.
        svn_path = repository[0]
        cache_path = Glo.get_local_cache_path(svn_path, buildc_rc.external_repositories)
        print "\n===>Begin remove local cache of repository [" + svn_path + ']'
        ret = Util.execute_and_return('rm -rf ' + cache_path)
        if (ret != 0):
            print 'Remove [' + cache_path + '] Failed!'
            sys.exit(ret)
        else:
            print 'Remove [' + cache_path + '] OK!'
        print "\n<=== End remove local cache of repository [" + svn_path + ']'
    # Mark every node of the saved repository tree as empty and persist it.
    dotrepository = Glo.dot_buildc_repository_path()
    svn_tree = SvnTree()
    svn_tree.import_format_tree_from_file(dotrepository)
    svn_tree.take_item_data_by_browse(None, SvnTree.set_empty_node, 1)
    svn_tree.export_format_tree_to_file(dotrepository)
def findTablesInSheet(self, currSheet):
    """Locate the id-column marker in the sheet; return its MCell or None."""
    for r in xrange(currSheet.nrows):
        for c in xrange(currSheet.ncols):
            # The id column name marks where a table starts (a sheet may
            # contain several tables).
            if Util.cellVal(currSheet, r, c) == Rules.idColumnName:
                return MCell(rowIndex=r, colIndex=c)
    return None
def __make(cmode): cmd_str = 'make CMODE=' + cmode + ' > make.log 2>&1' err = Util.execute_and_return(cmd_str) f = open('make.log', 'r') print f.read() f.close() if err != 0: print "Failed to execute cmd [%s], errno = [%d]" % (cmd_str, err) sys.exit(err)
def memory_total_size():
    """Return total physical memory in MB, or -1 on failure.

    On failure the reason is stored in SystemLocalInfo.error_info_str
    (None when the platform is simply unsupported).
    """
    system_name = SystemLocalInfo.system()
    if system_name == 'linux':
        s = Util.execute_and_output('free -m', True)
        if s is None:
            SystemLocalInfo.error_info_str = "checking for free -m command fails ... no"
            return -1
        # First number in `free -m` output is the total size.
        size = re.findall(r"\d+", s)[0]
        return int(size)
    elif system_name == 'solaris':
        s = Util.execute_and_output('/usr/sbin/prtconf -vp | grep Mem')
        if s is None:
            # Mirror the linux branch instead of crashing on a failed command.
            SystemLocalInfo.error_info_str = "checking for /usr/sbin/prtconf -vp | grep Mem command fails ... no"
            return -1
        size = re.findall(r"\d+", s)[0]
        return int(size)
    else:
        SystemLocalInfo.error_info_str = None
        return -1
def get_shell_type():
    """Classify the login shell from $SHELL as 'csh', 'bash' or 'unknown'."""
    cmd_output = Util.execute_and_output('echo $SHELL')
    matches = re.findall('\/.*\/.*', cmd_output)
    if not matches:
        # $SHELL was empty or not an absolute path; don't crash on IndexError.
        return 'unknown'
    if matches[0] == '/bin/csh':
        return 'csh'
    elif matches[0] == '/bin/bash':
        return 'bash'
    else:
        return 'unknown'
def system():
    """Map `uname -s` output onto this tool's platform identifiers."""
    kernel = Util.execute_and_output('uname -s')
    known_platforms = {
        'Linux': 'linux',
        'SunOS': 'solaris',
        'CYGWIN_NT-6.1-WOW64': 'CYGWIN_NT-6.1-WOW64',
    }
    return known_platforms.get(kernel, 'unknown')
def cpu():
    """Normalize `uname -p` output to 'sparc', 'x86' or 'unknown'."""
    arch = Util.execute_and_output('uname -p')
    if arch == 'sparc':
        return 'sparc'
    # Both 32- and 64-bit Intel report as plain 'x86' here.
    if arch in ('i386', 'x86_64'):
        return 'x86'
    return 'unknown'
def __do_component_src_pack(build_home, source, distribution, cmode, tag, force_update): url = '' if tag != None: url = tag is_valid = Pack.__build_component_src(build_home, url, cmode, force_update) if is_valid == False: return False else: for index in range(len(source)): url = source[index]["trunk"] is_valid = Pack.__build_component_src(build_home, url, cmode, force_update) if is_valid == False: return False Pack.__build_version(build_home, source, cmode, tag) Pack.__make_component_src_package(build_home, distribution, cmode) Util.execute_and_output('rm -fr ' + build_home + '/.build') print 'Del [.build] OK!' return True
def __make_component_all_package(build_home, distribution, cmode):
    """Assemble the '-full' component tarball: copy deps and build output
    into a staging dir, tar+gzip it, and move it into distributions/.
    """
    os.chdir(build_home)
    Util.execute_and_output('mkdir -p ' + build_home + os.sep + '.package')
    print "Create dir [.package] OK!"
    # Package name: <packname>-<version>-<cpu>-<system>-<NN>bit-full<suffix>
    target_prefix = distribution["packname"] + \
                    '-' + distribution["version"] + \
                    '-' + Glo.CPU + \
                    '-' + Glo.SYSTEM + \
                    '-' + cmode[:2] + 'bit' + '-full' + Glo.PACK_SUFFIX
    # Stage the dependency tree (skipping .svn dirs).
    src_path = build_home + os.sep + 'src' + os.sep + 'deps'
    dst_path = build_home + os.sep + '.package' + os.sep + target_prefix + os.sep + 'deps'
    DataOper.copy_tree_ex(src_path, dst_path, [".svn"], ["*"], True)
    print "copy %s to %s" % (src_path, dst_path)
    # Stage the build output next to it.
    src_path = build_home + '/.build'
    dst_path = build_home + '/.package/' + target_prefix
    DataOper.copy_tree_ex(src_path, dst_path, [".svn"], ["*"], True)
    print "copy %s to %s" % (src_path, dst_path)
    shutil.copy2(build_home + '/src/VERSION', build_home + '/.package/' + target_prefix)
    os.chdir(build_home + os.sep + '.package')
    print "Cd " + build_home + os.sep + '.package'
    Util.execute_and_output('tar cvf ' + target_prefix + '.tar ' + target_prefix)
    print 'Generate ' + target_prefix + '.tar' + ' OK!'
    Util.execute_and_output('gzip ' + target_prefix + '.tar')
    print('Zip ' + target_prefix + '.tar' + ' OK!')
    os.chdir(build_home)
    print('Cd ' + build_home)
    # Publish the archive and remove the staging dir.
    Util.execute_and_output('mv .package/' + target_prefix + '.tar.gz ' + 'distributions')
    Util.execute_and_output('rm -fr ' + build_home + '/.package')
    print 'Del [.package] OK!'
    print 'Make target [' + target_prefix + '.tar.gz] OK!'
def __build_component_deps(build_home, url, cmode, force_update):
    """Export *url* into the scratch .build dir, rebuild its external-library
    cache, and copy every dependency into the build tree.

    Returns True on success, False when the cache build fails.
    """
    if not os.path.exists(build_home + os.sep + '.build'):
        Util.execute_and_output('mkdir -p ' + build_home + os.sep + '.build')
    os.chdir(build_home + os.sep + '.build')
    print "Create dir [.build] OK!"
    # Replace any previous export of the same component.
    Util.execute_and_output('rm -rf ' + url[url.rindex("/")+1:])
    SvnLocalOper.export(url, None, None, Glo.source_svn_user(), Glo.source_svn_passwd(), False)
    print "Export [" + url + "] OK!"
    source_home = build_home + '/.build/' + url[url.rindex("/")+1:]
    os.chdir(source_home)
    print "Cd " + source_home
    dotrc = Glo.dot_buildc_rc_path()
    if not os.path.exists(dotrc):
        print('Can not found ' + dotrc)
        print('Please run buildc init and then config .buildc.rc!')
        sys.exit(Errors.conf_file_not_found)
    buildc_rc = Load.load_dot_buildc_rc(dotrc)
    buildc_cfg = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())
    # Make sure every external library is cached for this compile mode.
    is_valid = Cache.cache_build_by_external_libs(buildc_cfg.external_libs, cmode, force_update)
    if is_valid == False:
        os.chdir(build_home)
        print "Cd " + build_home
        return False
    dotrepository = Glo.dot_buildc_repository_path()
    svn_tree = SvnTree()
    svn_tree.import_format_tree_from_file(dotrepository)
    # Copy each dependency from the cache into the build tree.
    for dependence in buildc_cfg.external_libs:
        Pack.__copy_dependent_all(dependence, svn_tree, buildc_rc, build_home, cmode)
    os.chdir(build_home)
    print "Cd " + build_home
    print 'Build deps [' + url + '] OK!'
    return True
def export(svn_path, download_path = None, ignore_error = None, trunk_user = None, trunk_pass = None, ignore_hint = True): if download_path != None: Util.execute_and_output("rm -rf " + download_path) cmd_str = "svn export " + "\"" + svn_path +"\"" if download_path != None: cmd_str += " " + "\"" + download_path +"\"" if trunk_user != None and trunk_pass != None and \ trunk_user != "" and trunk_pass != "": cmd_str += " " + "--username " + trunk_user cmd_str += " " + "--password " + trunk_pass cmd_str += " --no-auth-cache" if not ignore_hint: print "command: " + cmd_str err = Util.execute_and_return(cmd_str) if err != 0: print "Failed to execute cmd [%s], errno = [%d]" % (cmd_str, err) if (ignore_error == None): sys.exit(err) return False return True
def __build_component_src(build_home, url, cmode, force_update):
    """Export *url* into the scratch .build dir and prepare it as a
    relocatable source component: configure Make.rules against ../deps,
    drop buildc.cfg, and generate a `configure` script that re-points
    Make.rules at the final install location.

    Returns True on success, False when Make.rules configuration fails.
    """
    if not os.path.exists(build_home + os.sep + '.build'):
        Util.execute_and_output('mkdir -p ' + build_home + os.sep + '.build')
    os.chdir(build_home + os.sep + '.build')
    print "Create dir [.build] OK!"
    # Replace any previous export of the same component.
    Util.execute_and_output('rm -rf ' + url[url.rindex("/")+1:])
    SvnLocalOper.export(url, None, None, Glo.source_svn_user(), Glo.source_svn_passwd(), False)
    print "Export [" + url + "] OK!"
    source_home = build_home + '/.build/' + url[url.rindex("/")+1:]
    os.chdir(source_home)
    print "Cd " + source_home
    # Point libraries at ../deps and the top dir at the source itself.
    is_valid = Makerules.config_make(cmode, force_update, "$(shell cd ../.; pwd)/deps", "$(shell cd .; pwd)")
    if is_valid == False:
        os.chdir(build_home)
        print "Cd " + build_home
        return False
    print "Config Make.rules OK!"
    Util.execute_and_output('rm -f buildc.cfg')
    print "Remove buildc.cfg OK!"
    # Shell script written verbatim; it rewrites Make.rules paths relative
    # to wherever the package is finally unpacked.
    cmd_str =\
"""#! /bin/sh
topdir=`pwd`
parentdir=`cd ../.; pwd`
sed -e "1,$ s%=.*@topdir@%= $topdir#@topdir@%g" Make.rules |\
sed -e "1,$ s%\$(shell cd ../.; pwd)/deps%$parentdir/deps%g"\
> .Make.rules
mv .Make.rules Make.rules
echo "config Make.rules OK!"
"""
    f = open("configure", "w")
    f.write(cmd_str)
    f.close()
    Util.execute_and_output('chmod +x configure')
    print "Create configure OK!"
    os.chdir(build_home)
    print "Cd " + build_home
    print 'Build src [' + url + '] OK!'
    return True
def update(download_path, ignore_error = None, trunk_user = None, trunk_pass = None): cmd_str = "svn update " + "\"" + download_path +"\"" if trunk_user != None and trunk_pass != None and \ trunk_user != "" and trunk_pass != "": cmd_str += " " + "--username " + trunk_user cmd_str += " " + "--password " + trunk_pass cmd_str += " --no-auth-cache" err = Util.execute_and_return(cmd_str) if err != 0: print "Failed to execute cmd [%s], errno = [%d]" % (cmd_str, err) if (ignore_error == None): sys.exit(err) return False return True
def get_svn_ls(svn_path, ignore_error = None, trunk_user = None, trunk_pass = None):
    """Return the entries of `svn ls <svn_path>` as a list of lines.

    Returns [] both when the command fails (None output) and when the
    listing is empty.
    """
    cmd_str = "svn ls " + "\"" + svn_path + "\""
    if trunk_user and trunk_pass:
        cmd_str += " " + "--username " + trunk_user
        cmd_str += " " + "--password " + trunk_pass
        cmd_str += " --no-auth-cache"
    svn_ls_str = Util.execute_and_output(cmd_str, ignore_error)
    if not svn_ls_str:
        # Covers both a failed command (None) and an empty listing ("").
        return []
    return str(svn_ls_str).split("\n")
def __build_source(build_home, url, cmode, force_update, binary_prefix, pack_path):
    """Export *url* into the scratch .build dir, configure Make.rules,
    run make, and copy the produced binaries (binary_prefix*) into
    src/<pack_path> when *pack_path* is non-empty.

    Returns True on success, False when Make.rules configuration fails.
    """
    Util.execute_and_output('mkdir -p ' + build_home + os.sep + '.build')
    os.chdir(build_home + os.sep + '.build')
    print "Create dir [.build] OK!"
    SvnLocalOper.export(url, None, None, Glo.source_svn_user(), Glo.source_svn_passwd(), False)
    print "Export [" + url + "] OK!"
    # The export lands in a dir named after the last URL component.
    source_home = build_home + os.sep + '.build' + os.sep + url[url.rindex("/")+1:]
    os.chdir(source_home)
    print "Cd " + source_home
    result = Makerules.config_make(cmode, force_update)
    if result == False:
        print "Config Make.rules Error!"
        os.chdir(build_home)
        print "Cd " + build_home
        return False
    print "Config Make.rules OK!"
    Pack.__make(cmode)
    print "Make OK!"
    if pack_path != "":
        # Collect the built binaries into the packaging tree.
        des_path = build_home + os.sep + 'src' + os.sep + pack_path
        if not os.path.exists(des_path):
            os.mkdir(des_path)
        Util.execute_and_output('cp ' + binary_prefix + '* ' + des_path)
        print "Copy binary file to [" + build_home + os.sep + 'src' + os.sep + pack_path + ']' + " OK!"
    os.chdir(build_home)
    print "Cd " + build_home
    print 'Build source [' + url + '] OK!'
    return True
def __make_package(build_home, distribution, cmode):
    """Assemble the distribution tarball: stage src/, tar+gzip it, and move
    the archive into distributions/ (created if missing).
    """
    os.chdir(build_home)
    Util.execute_and_output('mkdir -p ' + build_home + os.sep + '.package')
    print "Create dir [.package] OK!"
    # Package name: <packname>-<version>-<cpu>-<system>-<NN>bit<suffix>
    target_prefix = distribution["packname"] + \
                    '-' + distribution["version"] + \
                    '-' + Glo.CPU + \
                    '-' + Glo.SYSTEM + \
                    '-' + cmode[:2] + 'bit' + Glo.PACK_SUFFIX
    # Stage the source tree (skipping .svn dirs).
    src_path = build_home + os.sep + 'src'
    dst_path = build_home + os.sep + '.package' + os.sep + target_prefix
    DataOper.copy_tree_ex(src_path, dst_path, [".svn"], ["*"], True)
    print "copy %s to %s" % (src_path, dst_path)
    os.chdir(build_home + os.sep + '.package')
    print "Cd " + build_home + os.sep + '.package'
    Util.execute_and_output('tar cvf ' + target_prefix + '.tar ' + target_prefix)
    print 'Generate ' + target_prefix + '.tar' + ' OK!'
    Util.execute_and_output('gzip ' + target_prefix + '.tar')
    print('Zip ' + target_prefix + '.tar' + ' OK!')
    os.chdir(build_home)
    print('Cd ' + build_home)
    des_path = 'distributions'
    if not os.path.exists(des_path):
        os.mkdir(des_path)
    # Publish the archive and remove the staging dir.
    Util.execute_and_output('mv .package/' + target_prefix + '.tar.gz ' + des_path)
    Util.execute_and_output('rm -fr ' + build_home + '/.package')
    print 'Del [.package] OK!'
    print('Make target [' + target_prefix + '.tar.gz] OK!')
def postHandler(self, renderer):
    """Handle an uploaded workbook: convert it to JSON and return it as a
    downloadable attachment.  The download file name comes from the meta
    sheet (cell 0,1) when present, else defaults to "data.py".
    """
    form = web.input(userfile={}, comment=None)
    submittedFileName = str(form['userfile'].filename)
    if Util.getIndexOfStringInString(submittedFileName, ".csv", None):
        pass # do stuff with csv to xlsx
    tempDir = Util.createDirForTempFiles()
    result = "Success!"
    isError = 0
    try:
        # Persist the upload, open it as a workbook, and serialize to JSON.
        filePath = Util.createTempFileWithData(tempDir, form['userfile'].value)
        workBook = Util.openWorkBook(filePath)
        result = Util.dictToJson(self.workBookToJson(workBook))
        fileName = None
        try:
            # Optional meta sheet supplies the download file name.
            metaSheet = workBook.sheet_by_name(Rules.metaSheetName)
            fileName = Util.cellVal(metaSheet, 0, 1)
        except:
            # NOTE(review): bare except deliberately swallows a missing meta
            # sheet; fileName simply stays None.
            None
        if(not fileName):
            fileName = "data.py"
        attachmentName = 'attachment; filename="%s" ' % (fileName)
        web.header('Content-type','application/octet-stream')
        web.header('Content-transfer-encoding','base64')
        web.header('Content-Disposition',attachmentName)
        return result
    except Exception, e:
        isError = 1
        print "IN Excepetion"
        Util.printStackTraceInConsole()
        result = "Failed %s " % str(e)
    # Only reached on error (the success path returns above).
    print str("result it %s " % result)
def get_svn_info_revision_code(path, ignore_error = None, trunk_user = None, trunk_pass = None):
    """Return the revision number reported by `svn info <path>` as a string.

    Returns "" when the command fails, emits only a warning, or its output
    contains no revision line.
    """
    cmd_str = "svn info " + "\"" + path + "\""
    if trunk_user and trunk_pass:
        cmd_str += " " + "--username " + trunk_user
        cmd_str += " " + "--password " + trunk_pass
        cmd_str += " --no-auth-cache"
    svn_info_str = Util.execute_and_output(cmd_str, ignore_error)
    if svn_info_str is None:
        return ""
    if svn_info_str.startswith("svn: warning:"):
        return ""
    # findall yields [Revision, Last Changed Rev]; the first is the node revision.
    revision_code = re.findall(r" (\d+)\n", svn_info_str)
    if len(revision_code) == 0:
        return ""
    return revision_code[0]
def add_info_by_urls(urls):
    """Scrape each Boss job-posting URL, extract the job/company fields, and
    upsert a row into boss_spider keyed by an md5 of title+company+url.
    Prints progress and counts newly discovered positions.
    """
    count = 0        # newly discovered positions
    total_count = 0  # pages processed so far
    for i in urls:
        print '本次处理的网址是:'
        print i
        try:
            html = requests.get(i, headers=headers)
        except:
            # Network failure: wait 10 minutes (per the message) and retry once.
            print '处理失败,10分钟后重试'
            time.sleep(10)
            html = requests.get(i, headers=headers)
        soup = BeautifulSoup(html.text, 'html.parser')
        job_info = soup.find('div', 'info-primary')
        try:
            job_title = job_info.find('div', 'name').find('h1', '').text.encode(
                'UTF-8')  # job title
        except:
            # Parsing failed — assume the IP got rate-limited; pause 60s and retry.
            print 'ip被限制了,给你60s 手动解冻下'
            time.sleep(60)
            try:
                html = requests.get(i, headers=headers)
            except:
                print '处理失败,10分钟后重试'
                time.sleep(10)
                html = requests.get(i, headers=headers)
            soup = BeautifulSoup(html.text, 'html.parser')
            job_info = soup.find('div', 'info-primary')
            job_title = job_info.find('div', 'name').find('h1', '').text.encode(
                'UTF-8')  # job title
        money = job_info.find('div', 'name').find('span', 'salary').text.encode(
            'UTF-8')  # salary
        job_requirements = job_info.find('p').find_all(
            text=re.compile('.*'))
        work_location = ''.join(job_requirements[0]).encode('UTF-8')    # work location
        work_experience = ''.join(job_requirements[1]).encode('UTF-8')  # required experience
        education = ''.join(job_requirements[2]).encode('UTF-8')        # required education
        job_box = soup.find('div', 'job-box')
        jd = job_box.find('div', 'text').text.encode('UTF-8')  # job description
        try:
            company_name = job_box.find('div', 'detail-content').find(
                'div', 'name').text.encode('UTF-8')  # company name
            company_type = ''.join(
                job_box.find('li', 'company-type').find_all(
                    text=re.compile('.*'))[1]).encode('UTF-8')  # company type
        except:
            # Fall back to parsing the page title for the company name.
            company_name_str = soup.title.text.encode('UTF-8')
            company_name = company_name_str.split('_')[1].split('-')[0]
            company_type = '未知'
        create_times = datetime.datetime.now().date()
        url = i
        util = Util()
        # Dedup key: title + company + url.
        md5_str = job_title + company_name + str(url)
        md5 = util.get_md5(md5_str)
        sql = """ insert into boss_spider (job_title,company_name,company_type,money,work_location,work_experience,education,jd,url,create_times,md5) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) on DUPLICATE KEY UPDATE param = %s; """
        count_sql = """ select param from 
boss_spider where md5 = %s; """
        dbutil = DBUtil(_ZECHN_DB)
        # print count_sql % md5
        spider_count = dbutil.read_one(count_sql, md5)
        if spider_count == None:
            count += 1
            print '有一个新岗位出现呦!'
        else:
            spider_count = spider_count[0]
            spider_count = int(spider_count) + 1  # times this posting was re-crawled
        params = [
            job_title, company_name, company_type, money, work_location,
            work_experience, education, jd, url, create_times, md5,
            spider_count
        ]
        try:
            result = dbutil.execute(sql, params)
        except:
            print '有一条插入失败 url为:'
            print url
        total_count += 1
        # Throttle: rest 1s after every 17 postings.
        if (total_count % 17 == 0):
            print '每爬完17条具体职位数据后休息1s'
            time.sleep(1)
        print '正在处理第%d条职位数据' % total_count
        sleep_time = random.uniform(0, 1)
        # print '处理完成,睡眠%s秒,继续爬下一条数据' % sleep_time
        print '..............................................'
        # time.sleep(sleep_time)
    print 'success!一共发现了%d条新职位' % count
def main():
    """Entry point of the selenium test driver.

    Parses argv ('ls' lists configs, '-f <path>' runs a config file, a bare
    name selects a named case), starts an optional browsermob proxy, launches
    Firefox/Chrome (optionally headless / H5 mobile emulation), runs the
    configured test suite, and optionally writes an HTML report and mails it.
    """
    global proxy_client, proxy_server
    LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
    log_filename = 'logs/test_' + time.strftime("%Y%m%d", time.localtime()) + '.log'
    logging.basicConfig(filename=log_filename, level=logging.INFO, format=LOG_FORMAT)
    # Directory containing this script.
    path = os.path.dirname(os.path.realpath(__file__))
    test_config = TestConfig(path)
    if len(sys.argv) > 1:
        param_type = sys.argv[1]
        # 'ls': list the files under config/ and exit.
        if 'ls' == param_type:
            files = getFileName(path + '/config/')
            print(Util.pretty(files))
            return
        elif '-f' == param_type:
            # '-f <absolute path>': run the test case described by that file.
            if len(sys.argv) < 3:
                print('请输入测试用例配置文件路径(绝对路径)')
                return
            else:
                test_case_path = sys.argv[2]
                report_name = '使用配置文件自动化测试'
                if os.path.exists(test_case_path):
                    print('测试用例文件路径为:' + test_case_path)
                else:
                    print('测试用例文件不存在')
                    return
        else:
            # Any other argument is taken as the test-case name itself.
            report_name = param_type
    else:
        report_name = 'default'
    # Load the test-case data.
    config = test_config.get_test_case(sys.argv)
    # Whether to capture traffic through a browsermob proxy.
    is_open_proxy = config.get('BROWSER').get('proxy')
    if is_open_proxy:
        from browsermobproxy import Server
        bmp_path = config.get('BROWSER').get('bmp_path')
        logging.info('开启代理 ' + bmp_path)
        proxy_server = Server(bmp_path)
        proxy_server.start()
        proxy_client = proxy_server.create_proxy()
    # Browser type.
    browser_type = config.get('BROWSER').get('type')
    # H5 (mobile emulation) mode and the emulated device name.
    h5 = config.get('BROWSER').get('H5', False)
    device_name = config.get('BROWSER').get('device_name', 'iPhone 7')
    # Headless mode.
    headless = config.get('BROWSER').get('headless', False)
    if browser_type == 'Firefox':
        options = FirefoxOptions()
        if headless:
            options.add_argument("-headless")
        options.page_load_strategy = 'normal'
        if h5:
            user_agent = "Mozilla/5.0 (iPhone; U; CPU iPhone OS 3_0 like Mac OS X; en-us) AppleWebKit/528.18 (KHTML, like Gecko) Version/4.0 Mobile/7A341 Safari/528.16"
            options.set_preference("general.useragent.override", user_agent)
        if is_open_proxy:
            options.add_argument('--proxy-server={0}'.format(
                proxy_client.proxy))
        browser = webdriver.Firefox(options=options)
    elif browser_type == 'Chrome':
        options = ChromeOptions()
        if headless:
            options.add_argument('--no-sandbox')  # disable the sandbox (lowers browser security)
            options.add_argument("--window-size=1920,1080")  # fixed window size for headless runs
            options.add_argument(
                "--start-maximized")  # maximize; element lookups may fail otherwise
            options.add_argument("--disable-extensions")
            # Disable GPU acceleration (Windows-oriented; not a valid configuration for Linux OS).
            options.add_argument('--disable-gpu')
            options.add_argument('--disable-dev-shm-usage')  # work around limited /dev/shm
            options.add_argument('--lang=en_US')
            options.add_argument("--headless")
        options.page_load_strategy = 'normal'
        if h5:
            mobileEmulation = {'deviceName': device_name}
            options.add_experimental_option('mobileEmulation', mobileEmulation)
        if is_open_proxy:
            options.add_argument('--proxy-server={0}'.format(
                proxy_client.proxy))
        browser = webdriver.Chrome(options=options)
    else:
        print('浏览器' + browser_type + ':类型不支持')
        return False
    logging.info('开始使用 ' + browser_type + ' 浏览器进行自动化测试')
    if is_open_proxy:
        # Start recording a HAR with headers and bodies.
        proxy_client.new_har("req", options={
            'captureHeaders': True,
            'captureContent': True
        })
    if browser_type == 'Firefox' and h5:
        browser.set_window_size(360, 640)
    else:
        browser.maximize_window()
    # Implicit wait for the browser, disabled by default.
    # browser.implicitly_wait(10)
    url = config.get('WEBSITE').get('url')
    browser.get(url)
    if is_open_proxy:
        Http.logHar(proxy_client.har)
    # Build a suite from every configured TEST entry.
    test = config.get('TEST')
    suite = unittest.TestSuite()
    m = Menu()
    for key in test:
        menus = m.getMenuConfig(config, key)
        try:
            if is_open_proxy:
                test_data = [browser, menus, proxy_client]
            else:
                test_data = [browser, menus]
            suite.addTest(
                ParametrizedTestCase.parametrize(Action, 'test_menu',
                                                 param=test_data))
        except AssertExcetion:
            print(key + " 断言失败")
    # Generate a report or not; DEBUG defaults to True (no report).
    debug = config.get('DEBUG', True)
    if debug:
        runner = unittest.TextTestRunner()
        runner.run(suite)
    else:
        # Screenshots in the report: only when DEBUG is False and IMAGE is True.
        image = config.get('IMAGE', False)
        report_path = path + '/reports/'
        report_file = report_name + "_" + time.strftime(
            "%Y%m%d", time.localtime()) + '.html'
        fp = open(report_path + report_file, 'wb')
        report_title = '你的测试报告'
        report_desc = '使用配置:' + report_name + '生成的测试报告'
        runner = HTMLTestRunner(stream=fp,
                                verbosity=2,
                                images=image,
                                title=report_title,
                                description=report_desc,
                                tester='pystest')
        runner.run(suite)
        fp.close()
    sleep(5)
    browser.quit()
    # send mail or not
    mail = config.get('MAIL')
    if not debug and mail and mail.get('SEND'):
        email_title = report_title
        email_content = report_desc
        smtp = Mail(config.get('MAIL'), report_path)
        smtp.connect()
        smtp.login()
        smtp.send(email_title, email_content, report_file)
        smtp.quit()
    if is_open_proxy:
        proxy_client.close()
        proxy_server.stop()
def setUpClass(cls):
    """Load the ESC 4.4 standalone VPC VNFD once for the whole test class."""
    descriptor = Util.solcon("standalone_vpc_vnfd_esc_4_4.yaml")
    cls.vnfd = descriptor
    # Convenience handle on the VDU list inside the descriptor.
    cls.vdus = descriptor["vnfd"]["vdu"]
def update_tree(self, item, cmode, ignore_error):
    """Recursively sync every leaf of the dependency tree matching *cmode*
    with its SVN source: checkout missing libraries, update stale ones,
    and record the fresh revision code on each node.
    """
    if (item == None):
        # No start item: walk the whole tree from the root.
        h_child_item = self.get_root_item()
    else:
        h_child_item = self.get_child_item(item)
    while (h_child_item != None):
        if (self.get_child_item(h_child_item) == None):
            # Leaf node: its text encodes a library artifact name.
            item_text = self.get_item_text(h_child_item)
            # Only leaves built for this compile mode (e.g. "_64_") are handled.
            if str(item_text).find("_" + cmode[0:2] + "_") != -1:
                full_svn_path = self.get_full_path(h_child_item, '|')[0]
                real_svn_path = str(full_svn_path).replace('|', '/')
                svn_root_path = full_svn_path[:str(full_svn_path).find('|')]
                cache_root_path = Glo.get_local_cache_path(svn_root_path, self.__repositories)
                if cache_root_path == None:
                    print svn_root_path + ' does not exist in .buildc.rc'
                    sys.exit(Errors.conf_item_not_found)
                # Map the '|'-separated svn path onto the local cache layout:
                # <cache_root>/<name>/<version>/<artifact>
                full_cache_path = cache_root_path + '|' + full_svn_path[str(full_svn_path).find('|') + 1:]
                real_cache_path = str(full_cache_path).replace('|', '/')
                real_cache_version_path = real_cache_path[:str(real_cache_path).rfind('/')]
                real_cache_name_path = real_cache_version_path[:str(real_cache_version_path).rfind('/')]
                dep_libname = real_cache_name_path[str(real_cache_name_path).rfind('/') + 1:]
                dep_libversion = real_cache_version_path[str(real_cache_version_path).rfind('/') + 1:]
                (trunk_user, trunk_passwd) = self.__get_user_and_passwd_by_url(svn_root_path)
                # Remote revision of the library in SVN.
                svn_revision_code = SvnLocalOper.get_svn_info_revision_code(real_svn_path, True, trunk_user, trunk_passwd)
                if h_child_item.data == 'none':
                    # Node marked clean: drop any stale cached copy.
                    if os.path.exists(real_cache_path):
                        Util.execute_and_return("rm -rf " + real_cache_path)
                else:
                    if not os.path.exists(real_cache_path):
                        # Library never cached: fresh checkout.
                        print 'library [' + dep_libname + ' ' + dep_libversion + '] does not exist!'
                        print 'Checkout [' + real_svn_path + ']...'
                        SvnLocalOper.checkout(real_svn_path, real_cache_path, ignore_error, trunk_user, trunk_passwd)
                        print 'Checkout [' + real_svn_path + '] OK!'
                    else:
                        # Cached copy exists: update only when revisions differ.
                        cache_revision_code = SvnLocalOper.get_svn_info_revision_code(real_cache_path, None)
                        if svn_revision_code != cache_revision_code:
                            print 'Update [' + dep_libname + ' ' + dep_libversion + ']...'
                            SvnLocalOper.update(real_cache_path, ignore_error, trunk_user, trunk_passwd)
                            print 'Update [' + dep_libname + ' ' + dep_libversion + '] OK!'
                # Remember the revision we are now synced to.
                h_child_item.data = svn_revision_code
        # Recurse into this child, then continue with its siblings.
        self.update_tree(h_child_item, cmode, ignore_error)
        h_child_item = self.get_next_sibling_item(h_child_item)
@author: cristiano.franco
'''
from random import randint
from versao2_entities.alimentos import *
from versao2_entities.macacos import *
from utils.util import Util

if __name__ == "__main__":
    # Indices into the quantities sequence returned by Util.interface().
    FRUTAS = 0
    BANANAS = 1
    MACACOS = 2
    print("VERSÃO 2 \n\n")
    _quantidades = Util.interface()
    # food lists
    frutas = []
    for i in range(_quantidades[FRUTAS]):
        frutas.append(Fruta("Fruta"))
    bananas = []
    for i in range(_quantidades[BANANAS]):
        bananas.append(Banana("Banana"))
    macacos = Util.geraMacacos2(_quantidades[MACACOS])
    alimentos = []
    macacoDaVez = None
    alimentos.append(frutas)
def tableToDictionary(self, currSheet, rowIndex, colIndex):
    """Read one table of *currSheet* starting at (rowIndex, colIndex):
    first parse the header row into typed fields, then convert every data
    row into a nested dict keyed by the first (id) column.

    Raises on duplicate column names, invalid id cells, or cast failures.
    NOTE(review): no return statement is visible in this span — presumably
    the built dict is returned just after it; confirm against the full file.
    """
    #verify field pattern first
    dictColumns = {}  # for checking duplicaton if column is duplicated throw exception
    arrColumns = []  # for making the json.
    cellValue = Util.cellVal(currSheet, rowIndex, colIndex)  # initialize
    while (colIndex < currSheet.ncols and cellValue != ""):
        # read the row till empty cell is found it is ok to put a condition on empty cell
        cellValue = Util.cellVal(currSheet, rowIndex, colIndex)
        field = None
        if (cellValue == Rules.idColumnName):
            # the id column gets a fixed int type
            field = MField()
            field.fieldName = Rules.idColumnName
            field.fieldType = "int"
        else:
            field = self.extractField(cellValue)
            if (not field):
                # malformed header cell: warn and skip the column entirely
                print "Sheet::%s Warnning --- Ignore column '%s'" % (currSheet.name, cellValue)
                colIndex = colIndex + 1
                continue
        anyFieldAlready = Util.getDictValue(dictColumns, field.fieldName)
        if (anyFieldAlready):
            raise Exception("Sheet %s The filed %s is already available kindly rename it to something else for this sheet col=(%d) " % (currSheet.name, field.fieldName, colIndex))
        dictColumns[field.fieldName] = field.fieldType
        arrColumns.append(field)
        #print "field name is %s and field type is %s " % (field.fieldName,str(field.fieldType))
        colIndex = colIndex + 1
    mainDataDict = {}
    dataDict = mainDataDict[Rules.dataName] = {}
    for row_index in xrange(currSheet.nrows):
        col = 0
        if (row_index == 0):  # skip the first row (header)
            continue
        try:
            # id cell may be stored as a float by xlrd; normalize to long
            idKeyValue = long(float(Util.cellVal(currSheet, row_index, col)))  # pick the 0th column value
        except Exception, e:
            raise Exception("Exception %s while working with Sheet ('%s') Row_No=%i and Col=%i -------- Celll_Value =%s " % (str(e), currSheet.name, row_index, col, str(Util.cellVal(currSheet, row_index, col))))
        currRow = dataDict[str(idKeyValue)] = {}
        for column in arrColumns:
            cellValue = Util.cellVal(currSheet, row_index, col)
            try:
                # cast the raw cell to the type declared in the header
                cellValue = Util.getCastedValue(cellValue, column.fieldType)
            except Exception, e:
                print (str(e))
                print "exception in sheet %s in row =%i col=%i " % (currSheet.name, row_index, col)
                raise Exception("exception %s %s sheet in row =%i col=%i " % (str(e), currSheet.name, row_index, col))
            if (cellValue):
                # falsy values (empty/0) are dropped from the row dict
                currRow[column.fieldName] = cellValue
            col = col + 1
attachmentName = 'attachment; filename="%s" ' % (fileName) web.header('Content-type','application/octet-stream') web.header('Content-transfer-encoding','base64') web.header('Content-Disposition',attachmentName) return result except Exception, e: isError = 1 print "IN Excepetion" Util.printStackTraceInConsole() result = "Failed %s " % str(e) print str("result it %s " % result) finally: Util.deleteTempDir(tempDir) return renderer.uploader(isError,result) def workBookToJson(self,workBook): totalDict = {} for sheetName in workBook.sheet_names(): if sheetName == Rules.metaSheetName: continue # do code here currSheet=workBook.sheet_by_name(sheetName) tableStartCell = self.findTablesInSheet(currSheet)
def run():
    """Main entry point: spider the Nexus Mods file page for Stracker's
    Loader, download the newest archive when it differs from the installed
    one, unpack it and copy the files into the MHW game folder.

    Message codes passed to info_print are presumably looked up in a
    localization table — TODO confirm.
    """
    info_print('000')
    info_print('001')
    info_print('002')
    info_print('003')
    info_print('004')
    info_print('005')
    info_print('006')
    info_print('007')
    input('->')  # wait for the user to confirm before doing anything
    init_inject_func()
    init_locate()
    # -- gather local information --
    is_first_time = is_first_time_run()
    if is_first_time:
        first_time_run()
        info_print('107')
        info_print('108')
    conf_ini = Conf_ini(locate.get_run_folder())
    info_print('109')
    username, userpwd = conf_ini.get_nexus_account_info()
    get_cookies_info(username, userpwd)
    info_print('200')
    info_print('201')
    file_page_html, is_spider = get_mod_file_page(conf_ini.is_safe_to_spide())
    if is_spider:
        # record the timestamp of this crawl (rate limiting)
        conf_ini.set_new_last_spide_time()
    info_print('204_1')
    last_publish_date, last_download_url = analyze_mod_file_page(
        file_page_html)
    info_print('205')
    print("\t\t\t" + str(last_publish_date))
    info_print('206')
    print("\t\t\t" + last_download_url)
    last_publish_timeStamp = Util.transform_datetime_to_timeStamp(
        last_publish_date)
    installed_version_timeStamp = conf_ini.get_installed_SL_upload_date()
    if last_publish_timeStamp == installed_version_timeStamp:
        # already on the latest upload: nothing to do
        info_print('207')
        Util.warning_and_exit()
    info_print('208')
    download_page_html = spider_download_file_page(last_download_url)
    info_print('209')
    file_id, game_id = analyze_download_file_page(download_page_html)
    print('\t\tgame_id\t' + game_id, 2)
    print('\t\tfile id\t' + file_id, 2)
    info_print('210')
    download_url, file_type = spider_download_file(file_id, game_id)
    info_print('211')
    print("\t\t\t" + download_url)
    info_print('212')
    print("\t\t\t" + file_type)
    info_print('213')
    dl_loader_location = locate.get_resources_folder(
    ) + 'StrackerLoader.' + file_type
    downloadFile(download_url, dl_loader_location, 'cf-files.nexusmods.com')
    # TODO: English localization !!!!!!!!
    # info_print('215')
    if file_type == 'zip':
        Util.unzip_all(dl_loader_location, locate.get_dl_loader_folder(), '')
    else:
        # unexpected archive type: report and abort
        info_print('216')
        print('\t\t' + file_type)
        info_print('217')
        Util.warning_and_exit(1)
    info_print('218')
    # remove the files installed by the previous version first
    old_mod_file_list = conf_ini.get_mod_file_list()
    if len(old_mod_file_list) > 0:
        info_print('219')
        for _file in old_mod_file_list:
            print('\t\t\t' + _file)
            if Util.is_file_exists(locate.get_mhw_folder() + _file):
                Util.delete_file(locate.get_mhw_folder() + _file)
    else:
        info_print('220')
    info_print('221')
    sl_file_list = Util.get_file_list_in_folder(locate.get_dl_loader_folder())
    info_print('222')
    print('\t\t\t' + str(sl_file_list))
    info_print('223')
    # copy the freshly unpacked loader files into the game folder
    for _file in sl_file_list:
        print('\t\t' + _file)
        Util.copy_file(locate.get_dl_loader_folder() + _file,
                       locate.get_mhw_folder() + _file)
    info_print('224')
    info_print('225')
    # persist what we just installed so the next run can compare
    conf_ini.set_installed_SL_upload_date(last_publish_date)
    info_print('226')
    conf_ini.set_mod_file_list(sl_file_list)
    locate.save_to_conf_ini_file()
    info_print('227')
    if is_first_time:
        to_install_VC()
    print('3DM biss')
    Util.warning_and_exit(0)
def run_blocks(input_df: pd.DataFrame,
               blocks: List,
               y=None,
               preprocess_block=None,
               logger=None,
               filepath: str = "./",
               task: str = "train",
               save_feature: bool = False) -> pd.DataFrame:
    """Apply every feature block to *input_df* and concatenate the results
    column-wise, optionally caching each block's output as a pickle.

    Args:
        input_df (pd.DataFrame): original DataFrame
        blocks (List): feature blocks; each exposes fit()/transform() and
            its class name is used both for cache file names and as a
            column suffix
        y (_type_, optional): unused here; kept for interface compatibility.
            Defaults to None.
        preprocess_block (_type_, optional): if preprocessing is needed
            (for example fillna), pass the preprocessing function
        logger (_type_, optional): if not None, timings are logged
        filepath (str, optional): directory under which "features/" pickles
            are written. Defaults to "./".
        task (str, optional): "train" calls block.fit, anything else calls
            block.transform. Defaults to "train".
        save_feature (bool, optional): cache each block's output as pkl and
            reuse it when present. Defaults to False.

    Returns:
        pd.DataFrame: engineered features, one column group per block,
        columns suffixed with '@<BlockClassName>'
    """
    out_df = pd.DataFrame()
    if preprocess_block is not None:
        input_df = preprocess_block(input_df)
    # work on a copy so blocks cannot mutate the caller's frame
    _input_df = input_df.copy()
    if save_feature and not os.path.isdir(filepath + "features/"):
        os.makedirs(filepath + "features")
    print(decorate(f"start create block for {task}"))
    with Timer(logger=logger, prefix=f'create {task} block'):
        for block in blocks:
            if save_feature:
                # cache key encodes the block class plus whichever of
                # cols/name/n_components the block defines, so differently
                # configured instances get distinct files
                if hasattr(block, "cols"):
                    if hasattr(block, "name") and hasattr(
                            block, "n_components"):
                        file_name = os.path.join(
                            filepath + "features/",
                            f"{task}_{block.__class__.__name__}_{str(block.cols)}_{str(block.name)}_{str(block.n_components)}.pkl"
                        )
                    else:
                        file_name = os.path.join(
                            filepath + "features/",
                            f"{task}_{block.__class__.__name__}_{str(block.cols)}.pkl"
                        )
                else:
                    file_name = os.path.join(
                        filepath + "features/",
                        f"{task}_{block.__class__.__name__}.pkl")
            with Timer(logger=logger, prefix='\t- {}'.format(str(block))):
                if save_feature and os.path.isfile(file_name):
                    # cache hit: reuse the previously computed feature
                    out_i = Util.load(file_name)
                else:
                    if task == "train":
                        out_i = block.fit(_input_df)
                        if save_feature:
                            Util.dump(out_i, file_name)
                    else:
                        out_i = block.transform(_input_df)
                        if save_feature:
                            Util.dump(out_i, file_name)
            # every block must return one row per input row
            assert len(input_df) == len(out_i), block
            name = block.__class__.__name__
            out_df = pd.concat([out_df, out_i.add_suffix(f'@{name}')], axis=1)
    return out_df
class Glo(object):
    """Global constants and stateless helpers shared by the buildc tool:
    configuration/template file locations, platform info, and utilities
    for parsing .buildc.rc repository entries."""
    default_includes = ''
    default_libs = ''
    # per-user configuration files in the user's home directory
    DOT_BUILDC_RC = Util.usr_home() + os.sep + '.buildc.rc'
    DOT_BUILDC_REPOSITORY = Util.usr_home() + os.sep + '.buildc.repository'
    BUILDC_CFG_PATH = './buildc.cfg'
    MAKE_RULES_PATH = './Make.rules'
    MAKE_RULES_TEMP_PATH = './.Make.rules'
    # buildc_home is the parent directory of the package directory
    core_home = os.path.dirname(os.path.realpath(__file__))
    buildc_home = core_home[:str(core_home).rfind(os.sep)]
    # template files shipped under <buildc_home>/templates
    MAKERULES_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'Make.rules.in'
    SETUP_CFG_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'setup.cfg.in'
    SETUP_PY_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'setup.py.in'
    LAYOUT_CFG_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'layout.cfg.in'
    CHECKC_CFG_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'checkc.cfg.in'
    ENV_GEN_PY_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'env_gen.py.in'
    BUILDC_CFG_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'buildc.cfg.in'
    BUILDC_RC_TPL_PATH = buildc_home + os.sep + 'templates' + os.sep + 'buildc.rc.in'
    # arities of repository entries in .buildc.rc
    ONE_TUPLE = 1
    TWO_TUPLE = 2
    THREE_TUPLE = 3
    BIT32 = '32-bit'
    BIT64 = '64-bit'
    CPU = SystemLocalInfo.cpu()
    SYSTEM = SystemLocalInfo.system()
    PACK_SUFFIX = ""
    VAR_STR = ""
    SOURCE_SVN_USER = None
    SOURCE_SVN_PASSWD = None

    @staticmethod
    def var_str():
        return Glo.VAR_STR

    @staticmethod
    def source_svn_user():
        return Glo.SOURCE_SVN_USER

    @staticmethod
    def source_svn_passwd():
        return Glo.SOURCE_SVN_PASSWD

    @staticmethod
    def dot_buildc_rc_path():
        return Glo.DOT_BUILDC_RC

    @staticmethod
    def dot_buildc_repository_path():
        return Glo.DOT_BUILDC_REPOSITORY

    @staticmethod
    def buildc_cfg_path():
        return Glo.BUILDC_CFG_PATH

    @staticmethod
    def make_rules_path():
        return Glo.MAKE_RULES_PATH

    @staticmethod
    def make_rules_temp_path():
        return Glo.MAKE_RULES_TEMP_PATH

    @staticmethod
    def make_rules_tpl_path():
        return Glo.MAKERULES_TPL_PATH

    @staticmethod
    def setup_cfg_tpl_path():
        return Glo.SETUP_CFG_TPL_PATH

    @staticmethod
    def setup_py_tpl_path():
        return Glo.SETUP_PY_TPL_PATH

    @staticmethod
    def layout_cfg_tpl_path():
        return Glo.LAYOUT_CFG_TPL_PATH

    @staticmethod
    def checkc_cfg_tpl_path():
        return Glo.CHECKC_CFG_TPL_PATH

    @staticmethod
    def env_gen_py_tpl_path():
        return Glo.ENV_GEN_PY_TPL_PATH

    @staticmethod
    def buildc_cfg_tpl_path():
        return Glo.BUILDC_CFG_TPL_PATH

    @staticmethod
    def buildc_rc_tpl_path():
        return Glo.BUILDC_RC_TPL_PATH

    @staticmethod
    def add_backlash(string):
        # escape '/' so the string can be embedded in a sed s/// expression
        return string.replace("/", "\/")

    @staticmethod
    def libname2compile_option(libname):
        # 'libfoo.a'/'libfoo.so' -> 'foo' (suitable for gcc -l)
        return ((libname.replace("lib", "", 1)).replace(".a", "")).replace(".so", "")

    @staticmethod
    def is_static_lib(libfilename):
        return libfilename.endswith(".a")

    @staticmethod
    def get_dependent_name_and_version(dependence):
        """Unpack a 2- or 3-tuple dependence into (name, version, tagfile);
        tagfile is None for 2-tuples. Exits on any other arity."""
        dep_libname = None
        dep_libversion = None
        dep_tagfile = None
        if len(dependence) == Glo.THREE_TUPLE:
            (dep_libname, dep_libversion, dep_tagfile) = dependence
        elif len(dependence) == Glo.TWO_TUPLE:
            (dep_libname, dep_libversion) = dependence
        else:
            print 'dependence args invalid'
            sys.exit(Errors.args_invalid)
        return (dep_libname, dep_libversion, dep_tagfile)

    @staticmethod
    def get_repository(item):
        """Normalize a .buildc.rc entry: an item may wrap its repository
        tuple in another tuple; return the repository tuple itself."""
        repository = None
        if len(item) == Glo.ONE_TUPLE:
            if isinstance(item[0], tuple):
                repository = item[0]
            else:
                repository = item
        elif len(item) == Glo.TWO_TUPLE:
            if isinstance(item[0], tuple):
                repository = item[0]
            else:
                repository = item
        else:
            print 'tuple number invalid in .buildc.rc'
            sys.exit(Errors.tuple_number_invalid)
        return repository

    @staticmethod
    def get_local_cache_path(svn_path, repositories):
        """Return the absolute local cache directory mapped to *svn_path*
        by the .buildc.rc repository list, or None when no entry matches.
        1-tuples get the default '~/buildc_libs/<last-url-component>'."""
        url = None
        cache_path = None
        for item in repositories:
            repository = Glo.get_repository(item)
            if len(repository) == Glo.ONE_TUPLE:
                url = repository[0]
                if svn_path == url:
                    cache_path = '~/buildc_libs/'
                    cache_path += url[str(url).rfind('/') + 1:]
                    return os.path.abspath(os.path.expanduser(cache_path))
            elif len(repository) == Glo.TWO_TUPLE or len(
                    repository) == Glo.THREE_TUPLE:
                (url, cache_path) = (repository[0], repository[1])
                if svn_path == url:
                    return os.path.abspath(os.path.expanduser(cache_path))
            else:
                print 'tuple number invalid in .buildc.rc'
                sys.exit(Errors.tuple_number_invalid)
        return None
def transform(self, inputDir, outputDir):
    """Read every .csv under *inputDir*, validate the tables, and export
    generated code into *outputDir*.

    Pipeline: read csv -> group into exportDatas / exportConstDatas ->
    rule-check all fields -> write code via DataWriter/ConstDataWriter.
    Returns True on success, False on the first error (already logged).
    """
    files = []
    inputDir = inputDir.rstrip(os.sep)
    for fileName in os.listdir(inputDir):
        if fileName.lower().endswith('.csv'):
            files.append(inputDir + os.sep + fileName)
    files.sort()  # deterministic processing order
    #read
    logger.info("读取csv开始...")
    for f in files:
        if sys.platform == 'win32':
            # Windows paths arrive GBK-encoded; re-encode for the log
            logger.info("读取csv", f.decode('GBK').encode('UTF-8'))
        else:
            logger.info("读取csv", f)
        data = CsvReader(f).readData()
        if data is None:
            logger.error("读取csv失败!")
            return False
        # const tables and regular tables are accumulated separately
        if not data[Util.CONST_DATA_KEY]:
            exportName, head, rawData = self._transformData(data)
            exportData = self.exportDatas.setdefault(
                exportName, [[], [], []])
        else:
            exportName, head, rawData = self._transformConstData(data)
            exportData = self.exportConstDatas.setdefault(
                exportName, [[], [], []])
        # exportData holds parallel lists: [heads, rawDatas, identifiers]
        exportData[0].append(head)
        exportData[1].append(rawData)
        exportData[2].append(head[Util.FILE_IDENTIFY])
        if not Util.TABLE_CACHE.has_key(head[Util.FILE_IDENTIFY]):
            Util.TABLE_CACHE[head[Util.FILE_IDENTIFY]] = (
                head,
                rawData,
            )
        else:
            # duplicate table identifier is a hard error
            logger.error("存在重复数据表", head[Util.FILE_IDENTIFY])
            return False
    logger.info("读取csv完成!")
    #check
    logger.info("检查数据表开始...")
    isSuccess = True
    for exportName, exportData in self.exportDatas.iteritems():
        logger.info("检查数据表", Util.value2pystr(exportData[2]))
        ruleManager = RuleManager()
        ruleManager.createRules(exportData[0], exportData[2])
        if not ruleManager.checkFields(exportData[1]):
            logger.error("数据错误!")
            isSuccess = False
    if isSuccess:
        logger.info("检查数据表完成:全部正确!")
    else:
        logger.error("检查数据表完成:发生错误!")
        return False
    #write code
    logger.info("导出代码开始...")
    for exportName, exportData in self.exportDatas.iteritems():
        logger.info("导出代码", exportName)
        ret = DataWriter(exportData[0], exportData[1], outputDir,
                         exportName).writeFile()
        if not ret:
            logger.error("导出代码失败!")
            return False
    for exportName, exportData in self.exportConstDatas.iteritems():
        ret = ConstDataWriter(exportData[0], exportData[1], outputDir,
                              exportName).writeFile()
        if not ret:
            logger.error("导出代码失败!")
            return False
    logger.info("导出代码完成!")
    #write char set (disabled)
    # logger.info("导出字符集开始...")
    # for charName, charSet in Util.CHAR_SET_MAP.iteritems():
    #     if not charSet:
    #         continue
    #     content = ""
    #     for char in sorted(charSet):
    #         char = char.encode('utf-8')
    #         if char and char != ' ':
    #             content += char
    #     if content:
    #         Writer(outputDir, charName).writeChar(content)
    # logger.info("导出字符集完成!")
    return True
def reconfig(cmode, libs_depend, project_root_path = None):
    """Rewrite the existing Make.rules in place: substitute the @marker@
    placeholders (topdir, os, cpu, cmode, compiler, include/lib flags, ...)
    with values derived from buildc.cfg and *libs_depend*, using a long
    sed/awk shell pipeline.

    cmode             -- '32-bit' or '64-bit'
    libs_depend       -- list of (libname, libversion, archives, libpath)
    project_root_path -- project top dir; defaults to the current directory
    """
    makerules = Glo.make_rules_path()
    # Solaris ships nawk; elsewhere use gawk for the in-place substitutions
    if (Glo.SYSTEM == 'solaris'):
        this_awk = 'nawk'
    else:
        this_awk = 'gawk'
    print "Reconfig [" + makerules + "]..."
    #Warning if we can not found '@Generated by buildc@' embeded in the Make.rules
    f = open(makerules)
    s = f.read(1024)
    if s.find("@Generated by buildc@") == -1:
        print "Warning: Please make sure the Make.rules file is generated by buildc!"
    f.close()
    c = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())
    project_name, version, author = c.project
    if project_root_path == None:
        topdir = os.path.abspath('./')
    else:
        topdir = project_root_path
    this_os = Glo.SYSTEM
    this_cpu = Glo.CPU
    cmode = cmode
    # pick the compiler command for the requested word size
    if cmode == '64-bit':
        if this_os == 'solaris' and this_cpu == 'x86':
            cc = '/usr/sfw/bin/gcc -m64'
        else:
            cc = 'gcc -m64'
    else:
        cc = 'gcc -m32'
    libs = ''
    includes = ''
    static_libs = ''
    dynamic_libs = ''
    lib_roots = ''
    lib_roots_count = len(libs_depend)
    if not lib_roots_count == 0:
        last_lib = libs_depend[lib_roots_count - 1]
    for (libname, libversion, archives, libpath) in libs_depend:
        # last entry carries the end marker so awk knows where the list stops
        if libname == last_lib[0]:
            lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots_end@")
        else:
            lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots@\\n")
        includes += ('-I $(' + libname + '_ROOT)' + '/include ')
        libs += (' -L $(' + libname + '_ROOT)' + '/lib')
        for archive in archives:
            libs += (' -l' + Glo.libname2compile_option(archive))
            if Glo.is_static_lib(archive):
                static_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                static_libs += (' -l' + Glo.libname2compile_option(archive))
            else:
                dynamic_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                dynamic_libs += (' -l' + Glo.libname2compile_option(archive))
    custom_defs = ''
    for cdef in c.custom_defs:
        custom_defs += (cdef + ' ')
    custom_vars = ''
    custom_vars_count = len(c.custom_vars)
    for var in c.custom_vars:
        custom_vars += (var[0] + ' = ' + str(var[1]))
        if var == c.custom_vars[custom_vars_count - 1]:
            custom_vars += "#@custom_vars_end@"
        else:
            custom_vars += "#@custom_vars@\\n"
    custom_includes = ''
    for inc in c.custom_includes:
        custom_includes += ('-I ' + inc + ' ')
    custom_libs = ''
    for (libpath, archives) in c.custom_libs:
        if not len(libpath) == 0:
            custom_libs += (' -L' + libpath)
        for archive in archives:
            custom_libs += (' -l' + Glo.libname2compile_option(archive))
            if Glo.is_static_lib(archive):
                if not len(libpath) == 0:
                    static_libs += (' -L ' + libpath)
                static_libs += (' -l' + Glo.libname2compile_option(archive))
            else:
                if not len(libpath) == 0:
                    dynamic_libs += (' -L ' + libpath)
                dynamic_libs += (' -l' + Glo.libname2compile_option(archive))
    system_libs = ''
    for (libpath, archives) in c.system_libs:
        if not len(libpath) == 0:
            system_libs += (' -L ' + libpath)
        for archive in archives:
            system_libs += (' -l' + Glo.libname2compile_option(archive))
    # build one shell pipeline; each sed replaces one @marker@ placeholder
    # (values are '/'-escaped via Glo.add_backlash where they contain paths)
    cmd = "sed -e '1,$ s/=.*@topdir@/= " + Glo.add_backlash(topdir) + "#@topdir@/g' " + Glo.make_rules_path() + '|'
    cmd += "sed -e '1,$ s/=.*@os@/= " + this_os + "#@os@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@cpu@/= " + this_cpu + "#@cpu@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@cmode@/= " + cmode + "#@cmode@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@version@/= " + version + "#@version@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@cc@/= " + Glo.add_backlash(cc) + "#@cc@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@default_includes@/= " + Glo.add_backlash(Glo.default_includes) + "#@default_includes@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@default_libs@/= " + Glo.add_backlash(Glo.default_libs) + "#@default_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@custom_includes@/= " + Glo.add_backlash(custom_includes) + "#@custom_includes@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@custom_libs@/= " + Glo.add_backlash(custom_libs) + "#@custom_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@system_libs@/= " + Glo.add_backlash(system_libs) + "#@system_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@static_libs@/= " + Glo.add_backlash(static_libs) + "#@static_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@dynamic_libs@/= " + Glo.add_backlash(dynamic_libs) + "#@dynamic_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@custom_defs@/= " + custom_defs + "#@custom_defs@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@lib_includes@/= " + Glo.add_backlash(includes) + "#@lib_includes@/g'" + '|'
    cmd += "sed -e '1,$ s/=.*@libs_depend@/= " + Glo.add_backlash(libs) + "#@libs_depend@/g'" + '|'
    # drop the old multi-line lib_roots/custom_vars sections, then re-insert
    cmd += "sed -e '/^.*@lib_roots@/d'" + '|'
    cmd += "sed -e '1,$ s/^.*@lib_roots_end@/@lib_roots@/g'" + '|'
    cmd += "sed -e '/^.*@custom_vars@/d'" + '|'
    cmd += "sed -e '1,$ s/^.*@custom_vars_end@/@custom_vars@/g'" + '|'
    if lib_roots_count == 0:
        cmd += ("sed -e '1,$ s/@lib_roots@/#@lib_roots_end@/g'" + '|')
    else:
        cmd += (this_awk + " '{ sub(/@lib_roots@/, \"" + lib_roots + "\"); print }'" + '|')
    if custom_vars_count == 0:
        cmd += ("sed -e '1,$ s/@custom_vars@/#@custom_vars_end@/g'")
    else:
        cmd += (this_awk + " '{ sub(/@custom_vars@/, \"" + custom_vars + "\"); print }'")
    cmd += "> " + Glo.make_rules_temp_path()
    Util.execute_and_output(cmd)
    # atomically replace the old Make.rules with the regenerated one
    Util.execute_and_output('mv ' + Glo.make_rules_temp_path() + ' ' + Glo.make_rules_path())
    print "Reconfig [" + makerules + "] OK!"
def pack_create(project_path): if project_path == None: print "Error: no project name is given!" sys.exit(Errors.args_invalid) if os.path.exists(project_path): print "Error: [" + project_path + "] has already existed!" sys.exit(Errors.file_or_dir_exists) Util.execute_and_output('mkdir -p ' + project_path) Util.execute_and_output('cp ' + Glo.setup_cfg_tpl_path() + ' ' + project_path + os.sep + 'setup.cfg') Util.execute_and_output('mkdir -p ' + project_path + os.sep + 'distributions') Util.execute_and_output('mkdir -p ' + project_path + os.sep + 'src' + os.sep + 'app') Util.execute_and_output('cp ' + Glo.setup_py_tpl_path() + ' ' + project_path + os.sep + 'src' + os.sep + 'setup.py') Util.execute_and_output('cp ' + Glo.layout_cfg_tpl_path() + ' ' + project_path + os.sep + 'src' + os.sep + 'layout.cfg') Util.execute_and_output('touch ' + project_path + os.sep + 'src' + os.sep + 'README') Util.execute_and_output('mkdir -p ' + project_path + os.sep + 'src' + os.sep + 'deps') Util.execute_and_output('mkdir -p ' + project_path + os.sep + 'src' + os.sep + 'conf') Util.execute_and_output('mkdir -p ' + project_path + os.sep + 'src' + os.sep + 'others') Util.execute_and_output('mkdir -p ' + project_path + os.sep + 'src' + os.sep + 'scripts') Util.execute_and_output('cp ' + Glo.checkc_cfg_tpl_path() + ' ' + project_path + os.sep + 'src' + os.sep + 'scripts' + os.sep + 'deps_check.py') Util.execute_and_output('cp ' + Glo.env_gen_py_tpl_path() + ' ' + project_path + os.sep + 'src' + os.sep + 'scripts' + os.sep + 'env_gen.py') Util.execute_and_output('touch ' + project_path + os.sep + 'src' + os.sep + 'scripts' + os.sep + '__init__.py') return True
def is_first_time_run(): ''' @summary: 根据conf.ini文件是否存在,判断是否是第一次运行 @return: bool ''' return not Util.is_file_exists(locate.get_run_folder() + 'conf.ini')
def get_cookies_by_selenium_login(user_name, user_password): ''' @summary: 通过selenium获取cookies信息,并记录下来,返回 @return: cookies:dict ''' driver = init_selenium_driver() # 登录界面 driver.get('https://users.nexusmods.com/auth/sign_in') Util.info_print('请在页面中登录N网账户', 3) Util.info_print('如果设置在conf.ini的账户密码正确,这个过程会自动完成。', 3) Util.info_print('如果不正确,请手动输入账户密码', 3) Util.info_print('每一步操作都设置了30s的可行时间,超过时间程序就会退出', 3) # 登录界面 username_inputer = driver.find_element_by_id("user_login") username_inputer.send_keys(user_name) userpassword_inputer = driver.find_element_by_id("password") userpassword_inputer.send_keys(user_password) commit_button = driver.find_element_by_xpath('//input[@type="submit"]') commit_button.click() while driver.current_url == "https://users.nexusmods.com/auth/sign_in": time.sleep(1) # 欢迎界面 try: index_a = WebDriverWait(driver, 30).until( EC.presence_of_element_located( (By.XPATH, '//div[@class="links"]/div[@class="left-link"]/a[1]'))) index_a.click() finally: Util.info_print('等待进入首页,请勿操作', 3) # 返回首页后 while driver.current_url != "https://www.nexusmods.com/": time.sleep(1) Util.info_print('等待从首页中获取cookies', 3) try: WebDriverWait(driver, 30).until( EC.presence_of_element_located( (By.XPATH, '/html/body/header[1]/div[1]/div[2]/div/div/div[3]/div[2]'))) finally: nexus_cookies_list = driver.get_cookies() nexus_cookies = dict() for cookie in nexus_cookies_list: nexus_cookies[cookie['name']] = cookie['value'] with open(cookies_json_location, 'w', encoding="utf-8") as f: json.dump(nexus_cookies, f) driver.close() return nexus_cookies
def remove_tree(self, item, cmode): if (item == None): h_child_item = self.get_root_item() else: h_child_item = self.get_child_item(item) while (h_child_item != None): if (self.get_child_item(h_child_item) == None): item_text = self.get_item_text(h_child_item) if str(item_text).find("_" + cmode[0:2] + "_") != -1: full_svn_path = self.get_full_path(h_child_item, '|')[0] svn_root_path = full_svn_path[:str(full_svn_path).find('|' )] cache_root_path = Glo.get_local_cache_path( svn_root_path, self.__repositories) if cache_root_path == None: print svn_root_path + ' does not exist in .buildc.rc' sys.exit(Errors.conf_item_not_found) full_cache_path = cache_root_path + '|' + full_svn_path[ str(full_svn_path).find('|') + 1:] real_cache_path = str(full_cache_path).replace('|', '/') real_cache_version_path = real_cache_path[:str( real_cache_path).rfind('/')] real_cache_name_path = real_cache_version_path[:str( real_cache_version_path).rfind('/')] if h_child_item.data == 'none': if os.path.exists(real_cache_path): ret = Util.execute_and_return("rm -rf " + real_cache_path + "/.svn " + real_cache_path) if (ret != 0): print 'Remove [' + real_cache_path + '] Failed!' sys.exit(ret) else: print 'Remove [' + real_cache_path + '] OK!' else: if not os.path.exists(real_cache_path): pass else: ret = Util.execute_and_return("rm -rf " + real_cache_path + "/.svn " + real_cache_path) if (ret != 0): print 'Remove [' + real_cache_path + '] Failed!' sys.exit(ret) else: print 'Remove [' + real_cache_path + '] OK!' if os.path.exists(real_cache_version_path): if len(os.listdir(real_cache_version_path)) == 0: os.rmdir(real_cache_version_path) if os.path.exists(real_cache_name_path): if len(os.listdir(real_cache_name_path)) == 0: os.rmdir(real_cache_name_path) if os.path.exists(cache_root_path): if len(os.listdir(cache_root_path)) == 0: os.rmdir(cache_root_path) h_child_item.data = 'none' self.remove_tree(h_child_item, cmode) h_child_item = self.get_next_sibling_item(h_child_item)
''' # here put the import lib import time from selenium.webdriver.common.desired_capabilities import DesiredCapabilities from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium import webdriver import json import os from utils.util import Util # from util import Util cookies_json_location = Util.get_resources_folder() + 'Nexus_Cookies.txt' def init_selenium_chrome_driver(): ''' @summary: 配置selenium.webdriver chrome ''' chromedriver = Util.get_lib_folder() + "chromedriver.exe" drivePath = os.path.join(os.path.dirname(__file__), chromedriver) options = webdriver.ChromeOptions() # 禁止图片加载 prefs = {"profile.managed_default_content_settings.images": 2} options.add_experimental_option("prefs", prefs) # 不显示图片 options.add_argument('--blink-settings=imagesEnabled=false') # 非沙盒模式
def build_dependent(self, dep_libname, dep_libversion, cmode, force_update=True):
    """Locate one dependent library (name/version/platform leaf) in the
    repository tree and make its local cache copy current: checkout when
    missing, update when the remote revision differs (unless force_update
    is False).

    Returns True when the library was found and synced, False when it was
    not found in any repository or an SVN operation failed.
    """
    lib_flag = False
    h_child_item = self.get_root_item()
    # try each configured repository root in turn
    while (h_child_item != None):
        svn_root_path = self.get_item_text(h_child_item)
        # leaf name encodes cpu, 2-char cmode prefix and os
        full_svn_path = svn_root_path + '|' + dep_libname + '|' + dep_libversion + '|' + Glo.CPU + '_' + cmode[
            0:2] + '_' + Glo.SYSTEM
        real_svn_path = str(full_svn_path).replace('|', '/')
        leaf_node = self.find_item(full_svn_path, '|', False, 1)
        if leaf_node != None:
            lib_flag = True
            cache_root_path = Glo.get_local_cache_path(
                svn_root_path, self.__repositories)
            if cache_root_path == None:
                print svn_root_path + ' does not exist in .buildc.rc'
                sys.exit(Errors.conf_item_not_found)
            full_cache_path = cache_root_path + '|' + dep_libname + '|' + dep_libversion + '|' + Glo.CPU + '_' + cmode[
                0:2] + '_' + Glo.SYSTEM
            real_cache_path = str(full_cache_path).replace('|', '/')
            (trunk_user, trunk_passwd
             ) = self.__get_user_and_passwd_by_url(svn_root_path)
            svn_revision_code = SvnLocalOper.get_svn_info_revision_code(
                real_svn_path, True, trunk_user, trunk_passwd)
            # empty revision means the remote query failed
            if svn_revision_code == "":
                return False
            if leaf_node.data == 'none':
                # node previously marked removed: discard stale cache and
                # do a fresh checkout
                if os.path.exists(real_cache_path):
                    Util.execute_and_return("rm -rf " + real_cache_path)
                print 'library [' + dep_libname + ' ' + dep_libversion + '] does not exist!'
                print 'Checkout [' + real_svn_path + ']...'
                result = SvnLocalOper.checkout(real_svn_path,
                                               real_cache_path, True,
                                               trunk_user, trunk_passwd)
                if result == False:
                    return False
                print 'Checkout [' + real_svn_path + '] OK!'
                leaf_node.data = svn_revision_code
            else:
                if not os.path.exists(real_cache_path):
                    print 'library [' + dep_libname + ' ' + dep_libversion + '] does not exist!'
                    print 'Checkout [' + real_svn_path + ']...'
                    result = SvnLocalOper.checkout(real_svn_path,
                                                   real_cache_path, True,
                                                   trunk_user, trunk_passwd)
                    if result == False:
                        return False
                    print 'Checkout [' + real_svn_path + '] OK!'
                    leaf_node.data = svn_revision_code
                else:
                    if force_update:
                        # cache exists: update only when revisions differ
                        cache_revision_code = SvnLocalOper.get_svn_info_revision_code(
                            real_cache_path, True)
                        if cache_revision_code == "":
                            return False
                        if svn_revision_code != cache_revision_code:
                            print 'Update [' + dep_libname + ' ' + dep_libversion + ']...'
                            result = SvnLocalOper.update(
                                real_cache_path, True, trunk_user, trunk_passwd)
                            if result == False:
                                return False
                            print 'Update [' + dep_libname + ' ' + dep_libversion + '] OK!'
                            leaf_node.data = svn_revision_code
                    else:
                        print "Do not use force_update, skip the update check!"
                        pass
            # found in this repository: stop searching
            break
        h_child_item = self.get_next_sibling_item(h_child_item)
    if lib_flag == True:
        return True
    else:
        return False
def run():
    """Interactive one-shot updater for the Stracker's Loader mod:
    gathers local install state, crawls Nexus Mods for the newest upload,
    downloads/unpacks it when it differs (by upload date and dll MD5),
    and copies dinput8.dll + dinput-config.json into the MHW folder.
    """
    print("本程序由Recluse制作")
    print("本程序用于一键更新前置MOD-StrackerLoader")
    print("本程序不会用于盗号, 偷取信息 等非法操作")
    print("但由于源码是公开的, 可能存在被魔改成盗号程序的可能。故建议从github获取本程序。")
    print("github地址:https://github.com/RecluseXU/CheckStrackerLoader")
    print("输入回车键开始")
    input()  # wait for Enter before doing anything
    Util.info_print('初始化')
    Util.info_print('创建resources目录', 1)
    location = Util.get_resources_folder()[:-1]  # drop trailing separator
    Util.creat_a_folder(location)
    Util.info_print('创建lib目录', 1)
    location = Util.get_lib_folder()[:-1]
    Util.creat_a_folder(location)
    # -- gather local information --
    Util.info_print("获取本地信息")
    Util.info_print('尝试获取 MHW 目录', 1)
    MHW_Install_Address = Util.get_MHW_Install_Address()
    Util.info_print('MHW 目录:\t'+MHW_Install_Address, 2)
    Util.info_print('尝试获取当前目录', 1)
    run_folder_location = Util.get_run_folder()
    Util.info_print('当前目录:\t'+run_folder_location, 2)
    Util.info_print('检查StrackerLoader安装状态', 1)
    # presence of dinput8.dll in the game folder marks an existing install
    is_installed = Util.is_file_exists(MHW_Install_Address+'dinput8.dll')
    Util.info_print('安装状态:\t'+str(is_installed), 2)
    if is_installed:
        Util.info_print('尝试获取 StrackerLoader-dinput8.dll 的 MD5', 2)
        dinput8_dll_md5 = Util.get_file_MD5(MHW_Install_Address+'dinput8.dll')
    else:
        dinput8_dll_md5 = ""
    Util.info_print('尝试获取 conf.ini信息', 1)
    if not Util.is_file_exists(run_folder_location+'conf.ini'):
        # first run: ask for the Nexus account and create conf.ini
        Util.info_print('conf.ini不存在,创建conf.ini', 2)
        print('这次输入的信息会记录在conf.ini中,如果需要更改,用记事本修改conf.ini的内容即可')
        N_name = input('请输入N网账号或邮箱:')
        N_pwd = input('请输N网密码:')
        Conf_ini.creat_new_conf_ini(run_folder_location+'conf.ini',
                                    dinput8_dll_md5, N_name, N_pwd)
    Util.info_print('读取conf.ini', 2)
    conf_ini = Conf_ini(run_folder_location)
    Util.info_print('尝试获取 Cookies 信息', 1)
    username, userpwd = conf_ini.get_nexus_account_info()
    get_cookies_info(run_folder_location, username, userpwd)
    #
    Util.info_print("获取MOD信息")
    Util.info_print('尝试获取N网 "Stracker\'s Loader" 文件信息页', 1)
    file_page_html, is_spider = get_mod_file_page(conf_ini.is_safe_to_spide())
    if is_spider:
        # record the timestamp of this crawl (rate limiting)
        conf_ini.set_new_last_spide_time()
    Util.info_print(r'尝试分析文件页,得到 "Stracker\'s Loader" 最新版信息', 1)
    last_publish_date, last_download_url = analyze_mod_file_page(file_page_html)
    Util.info_print("最新版本上传日期\t" + str(last_publish_date), 2)
    Util.info_print("最新版本下载地址\t" + last_download_url, 2)
    last_publish_timeStamp = Util.transform_datetime_to_timeStamp(last_publish_date)
    installed_version_timeStamp = conf_ini.get_installed_SL_upload_date()
    if is_installed and last_publish_timeStamp == installed_version_timeStamp:
        # already on the newest upload: nothing to do
        Util.info_print("已安装的版本与最新版发布时间一致,无需更新")
        Util.warning_and_exit()
    Util.info_print('尝试获取N网 "Stracker\'s Loader" 最新版文件下载页', 1)
    download_page_html = spider_download_file_page(last_download_url)
    Util.info_print('尝试分析N网 "Stracker\'s Loader" 最新版文件下载页', 1)
    file_id, game_id = analyze_download_file_page(download_page_html)
    Util.info_print('game_id\t'+game_id, 2)
    Util.info_print('file id\t'+file_id, 2)
    Util.info_print('尝试获取N网 "Stracker\'s Loader" 最新版文件下载url', 1)
    download_url, file_type = spider_download_file(file_id, game_id)
    Util.info_print("最新版文件下载url\t" + download_url, 2)
    Util.info_print("最新版文件类型\t" + file_type, 2)
    Util.info_print('尝试下载"Stracker\'s Loader" 最新版文件', 1)
    location = Util.get_resources_folder() + 'StrackerLoader.' + file_type
    downloadFile(download_url, location)
    # -- process the downloaded archive --
    Util.info_print("信息处理")
    Util.info_print('尝试解压"Stracker\'s Loader" 文件', 1)
    downloaded_mod_location = Util.get_resources_folder() + 'StrackerLoader.' + file_type
    downloaded_mod_unpack_location = Util.get_resources_folder() + 'StrackerLoade\\'
    if file_type == 'zip':
        Util.unzip_all(downloaded_mod_location, downloaded_mod_unpack_location, '')
    Util.info_print('尝试获取刚下载的"Stracker\'s Loader" 文件MD5', 1)
    download_dll_location = Util.get_resources_folder() + '\\StrackerLoade\\dinput8.dll'
    download_dll_md5 = Util.get_file_MD5(download_dll_location)
    Util.info_print('刚下载的"Stracker\'s Loader" dll-MD5:\t' + download_dll_md5, 2)
    if is_installed and conf_ini.get_installed_mod_ddl_md5() == download_dll_md5:
        # identical binary already installed: just refresh the metadata
        Util.info_print('刚下载MD5 与 已安装MD5一致,无需更新', 2)
        Util.info_print('更新 已安装版本DLL的MD5 信息', 3)
        conf_ini.set_installed_mod_ddl_md5(download_dll_md5)
        Util.info_print('更新 已安装版本N网作者上传时间信息', 3)
        conf_ini.set_installed_SL_upload_date(last_publish_date)
        Util.warning_and_exit()
    # -- install (overwrite) --
    Util.info_print('尝试覆盖安装', 1)
    Util.info_print('覆盖安装dinput8.dll', 2)
    mhw_ddl_location = MHW_Install_Address+'dinput8.dll'
    Util.copy_file(download_dll_location, mhw_ddl_location)
    Util.info_print('覆盖安装dinput-config.json', 2)
    download_dinputconfig_location = Util.get_resources_folder() + '\\StrackerLoade\\dinput-config.json'
    mhw_dinputconfig_location = MHW_Install_Address + 'dinput-config.json'
    Util.copy_file(download_dinputconfig_location, mhw_dinputconfig_location)
    Util.info_print('更新安装信息', 2)
    Util.info_print('更新 已安装版本N网作者上传时间信息', 3)
    conf_ini.set_installed_SL_upload_date(last_publish_date)
    Util.info_print('更新 已安装版本DLL的MD5 信息', 3)
    conf_ini.set_installed_mod_ddl_md5(download_dll_md5)
    print('程序运行完毕 3DM biss')
    Util.warning_and_exit(0)
def harddisk_free_size():
    # Return total free hard-disk space, in whole gigabytes, summed over the
    # filesystems reported by `df -h` (header row removed by `sed '1d'`).
    # Returns -1 on unsupported platforms; exits the process on parse errors.
    size = 0
    system_name = SystemLocalInfo.system()
    s = Util.execute_and_output("df -h | sed \'1d\'")
    # Each df row is expected to have 6 whitespace-separated fields
    # (Filesystem Size Used Avail Use% Mounted-on); `step` advances row by row.
    step = 6
    unit = 1024
    s_list = re.split('[\t\n]?\s\s*|\n', s)
    if len(s_list) % step != 0:
        # Any row with a different field count (e.g. a device name containing
        # spaces) would break the fixed-stride parsing below, so bail out.
        print "error: the length of the list is invalid."
        sys.exit(Errors.logical_errors)
    if system_name == 'linux':
        index = 3  # field offset 3 in each 6-field row = df's "Avail" column
        count = len(s_list) / step  # number of rows (Python 2 integer division)
        size = 0
        i = 0
        while (i < count):
            # Normalize the human-readable "Avail" value to kilobytes.
            if str(s_list[index])[-1] == "G":
                size += float(str(s_list[index])[:-1]) * unit * unit
            elif str(s_list[index])[-1] == "M":
                size += float(str(s_list[index])[:-1]) * unit
            elif str(s_list[index])[-1] == "K":
                size += float(str(s_list[index])[:-1])
            else:
                if str(s_list[index]) == "0":
                    size += 0  # plain "0" means no free space on this fs
                else:
                    print "error: the list element is invalid."
                    sys.exit(Errors.logical_errors)
            index += step
            i += 1
        size = int(size / unit / unit)  # KB -> GB
        return size
    elif system_name == 'solaris':
        index = 3  # "Avail" column, as in the linux branch
        count = len(s_list) / step
        size = 0
        # swap-backed mounts all report the same pool; count "swap" only once.
        swap_num = 0
        i = 0
        while (i < count):
            # Only real disks (/dev/dsk/...) and the first "swap" entry are
            # counted; every other pseudo filesystem row is skipped.
            if str(s_list[index-3]).startswith("/dev/dsk") == False and \
                str(s_list[index-3]) != "swap":
                index += step
                i += 1
                continue
            if str(s_list[index - 3]) == "swap":
                swap_num += 1
                if swap_num > 1:
                    index += step
                    i += 1
                    continue
            # Same KB normalization ladder as the linux branch.
            if str(s_list[index])[-1] == "G":
                size += float(str(s_list[index])[:-1]) * unit * unit
            elif str(s_list[index])[-1] == "M":
                size += float(str(s_list[index])[:-1]) * unit
            elif str(s_list[index])[-1] == "K":
                size += float(str(s_list[index])[:-1])
            else:
                if str(s_list[index]) == "0":
                    size += 0
                else:
                    print "error: the list element is invalid."
                    sys.exit(Errors.logical_errors)
            index += step
            i += 1
        size = int(size / unit / unit)  # KB -> GB
        return size
    else:
        # Unsupported operating system.
        return -1
def __config(cmode, libs_depend, project_root_path = None):
    # Generate the project's Make.rules file from its template by substituting
    # every @placeholder@ with values taken from buildc.cfg and the supplied
    # dependency list, then writing the result via a sed/awk shell pipeline.
    #
    # cmode             -- compile mode string; '64-bit' selects 64-bit gcc flags
    # libs_depend       -- list of (libname, libversion, archives, libpath) tuples
    # project_root_path -- project top dir; defaults to the current directory
    makerules_tpl = Glo.make_rules_tpl_path()
    # Solaris ships nawk; gawk is assumed everywhere else.
    if (Glo.SYSTEM == 'solaris'):
        this_awk = 'nawk'
    else:
        this_awk = 'gawk'
    c = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())
    project_name, version, author = c.project
    date = time.strftime('%Y-%m-%d',time.localtime(time.time()))
    if project_root_path == None:
        topdir = os.path.abspath('./')
    else:
        topdir = project_root_path
    this_os = Glo.SYSTEM
    this_cpu = Glo.CPU
    cmode = cmode
    # Pick the compiler command; Solaris/x86 needs the SFW gcc for -m64.
    if cmode == '64-bit':
        if this_os == 'solaris' and this_cpu == 'x86':
            cc = '/usr/sfw/bin/gcc -m64'
        else:
            cc = 'gcc -m64'
    else:
        cc = 'gcc -m32'
    # Accumulators for the Make.rules variable bodies.
    libs = ''
    includes = ''
    static_libs = ''
    dynamic_libs = ''
    lib_roots = ''
    lib_roots_count = len(libs_depend)
    if not lib_roots_count == 0:
        last_lib = libs_depend[lib_roots_count - 1]
    for (libname, libversion, archives, libpath) in libs_depend:
        # The last entry gets the @lib_roots_end@ sentinel so awk knows where
        # the multi-line substitution stops; earlier entries embed a literal
        # "\n" marker for the template's line continuation.
        if libname == last_lib[0]:
            lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots_end@")
        else:
            lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots@\\n")
        includes += ('-I $(' + libname + '_ROOT)' + '/include ')
        libs += (' -L $(' + libname + '_ROOT)' + '/lib')
        for archive in archives:
            libs += (' -l' + Glo.libname2compile_option(archive))
            # Route each archive into the static or dynamic link line as well.
            if Glo.is_static_lib(archive):
                static_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                static_libs += (' -l' + Glo.libname2compile_option(archive))
            else:
                dynamic_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                dynamic_libs += (' -l' + Glo.libname2compile_option(archive))
    custom_defs = ''
    for cdef in c.custom_defs:
        custom_defs += (cdef + ' ')
    custom_vars = ''
    custom_vars_count = len(c.custom_vars)
    for var in c.custom_vars:
        custom_vars += (var[0] + ' = ' + str(var[1]))
        # Same sentinel scheme as lib_roots: end marker on the last variable.
        if var == c.custom_vars[custom_vars_count - 1]:
            custom_vars += "#@custom_vars_end@"
        else:
            custom_vars += "#@custom_vars@\\n"
    custom_includes = ''
    for inc in c.custom_includes:
        custom_includes += ('-I ' + inc + ' ')
    custom_libs = ''
    for (libpath, archives) in c.custom_libs:
        if not len(libpath) == 0:
            custom_libs += (' -L ' + libpath)
        for archive in archives:
            custom_libs += (' -l' + Glo.libname2compile_option(archive))
            if Glo.is_static_lib(archive):
                if not len(libpath) == 0:
                    static_libs += (' -L ' + libpath)
                static_libs += (' -l' + Glo.libname2compile_option(archive))
            else:
                if not len(libpath) == 0:
                    dynamic_libs += (' -L ' + libpath)
                dynamic_libs += (' -l' + Glo.libname2compile_option(archive))
    system_libs = ''
    for (libpath, archives) in c.system_libs:
        if not len(libpath) == 0:
            system_libs += (' -L ' + libpath)
        for archive in archives:
            system_libs += (' -l' + Glo.libname2compile_option(archive))
    # Build one long pipeline of sed substitutions over the template.
    # Glo.add_backlash presumably escapes '/' so values survive sed's
    # s/.../.../ delimiter — TODO confirm against Glo's implementation.
    cmd = "sed -e '1,$ s/@project_name@/" + project_name + "/g' " + makerules_tpl + '|'
    cmd += "sed -e '1,$ s/@author@/" + author + "/g'" + '|'
    cmd += "sed -e '1,$ s/@date@/" + date + "/g'" + '|'
    cmd += "sed -e '1,$ s/@topdir@/" + Glo.add_backlash(topdir) + "#@topdir@/g'" + '|'
    cmd += "sed -e '1,$ s/@os@/" + this_os + "#@os@/g'" + '|'
    cmd += "sed -e '1,$ s/@cpu@/" + this_cpu + "#@cpu@/g'" + '|'
    cmd += "sed -e '1,$ s/@cmode@/" + cmode + "#@cmode@/g'" + '|'
    cmd += "sed -e '1,$ s/@version@/" + version + "#@version@/g'" + '|'
    cmd += "sed -e '1,$ s/@cc@/" + Glo.add_backlash(cc) + "#@cc@/g'" + '|'
    cmd += "sed -e '1,$ s/@default_includes@/" + Glo.add_backlash(Glo.default_includes) + "#@default_includes@/g'" + '|'
    cmd += "sed -e '1,$ s/@default_libs@/" + Glo.add_backlash(Glo.default_libs) + "#@default_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/@custom_defs@/" + custom_defs + "#@custom_defs@/g'" + '|'
    cmd += "sed -e '1,$ s/@custom_includes@/" + Glo.add_backlash(custom_includes) + "#@custom_includes@/g'" + '|'
    cmd += "sed -e '1,$ s/@custom_libs@/" + Glo.add_backlash(custom_libs) + "#@custom_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/@system_libs@/" + Glo.add_backlash(system_libs) + "#@system_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/@static_libs@/" + Glo.add_backlash(static_libs) + "#@static_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/@dynamic_libs@/" + Glo.add_backlash(dynamic_libs) + "#@dynamic_libs@/g'" + '|'
    cmd += "sed -e '1,$ s/@lib_includes@/" + Glo.add_backlash(includes) + "#@lib_includes@/g'" + '|'
    cmd += "sed -e '1,$ s/@libs_depend@/" + Glo.add_backlash(libs) + "#@libs_depend@/g'" + '|'
    # Multi-line substitutions (lib_roots, custom_vars) need awk's sub();
    # empty lists collapse the placeholder to its end sentinel via sed.
    if lib_roots_count == 0:
        cmd += ("sed -e '1,$ s/@lib_roots@/#@lib_roots_end@/g'" + '|')
    else:
        cmd += (this_awk + " '{ sub(/@lib_roots@/, \"" + lib_roots + "\"); print }'" + '|')
    if custom_vars_count == 0:
        cmd += ("sed -e '1,$ s/@custom_vars@/#@custom_vars_end@/g'")
    else:
        cmd += (this_awk + " '{ sub(/@custom_vars@/, \"" + custom_vars + "\"); print }'")
    cmd += "> " + Glo.make_rules_path()
    Util.execute_and_output(cmd)
def get_cookies_info(run_location, user_name, user_pwd):
    '''
    @summary: Obtain Nexus cookie information, trying in order: a cached
              cookie file, a selenium-driven login, and finally manual input.
    @param run_location: unused here; kept for interface compatibility
    @param user_name: Nexus account name used for the selenium login
    @param user_pwd: Nexus account password used for the selenium login
    @return: cookies:dict (exits the process via Util.warning_and_exit(1)
             when no candidate validates)
    '''
    nexus_cookies_location = Util.get_resources_folder() + "Nexus_Cookies.txt"
    if Util.is_file_exists(nexus_cookies_location):
        Util.info_print('Nexus_Cookies.json存在', 2)
        Util.info_print('尝试通过Nexus_Cookies.json获取Cookies信息', 3)
        my_cookies = get_cookies_from_file()
        if is_login(my_cookies):
            Util.info_print('Cookies信息验证成功,', 4)
            # BUG FIX: this was a bare `return`, which handed None back to the
            # caller even though the cached cookies had just been validated.
            return my_cookies
        else:
            Util.info_print('Cookies信息验证失败,', 4)
    # Cached cookies missing or invalid: try an automated browser login next.
    Util.info_print('尝试通过登录N网记录Cookies信息', 2)
    my_cookies = get_cookies_by_selenium_login(user_name, user_pwd)
    if my_cookies is not None:
        return my_cookies
    # Last resort: ask the user to paste cookies manually, then validate.
    Util.info_print('尝试通过手动输入, 获知Cookies信息', 2)
    my_cookies = get_cookies_by_input()
    if is_login(my_cookies):
        Util.info_print('Cookies信息验证成功,', 4)
    else:
        Util.info_print('Cookies信息验证失败,', 4)
        Util.warning_and_exit(1)
    return my_cookies
from utils.util import Util
import random
import sys
import logging
from datetime import datetime

# Module-level singletons: one shared Util instance plus the lookup bases it
# loads. Presumably load_base returns lists of sample values keyed by name —
# TODO confirm against utils.util.Util.
__UTIL = Util()
__LOGGER = logging.getLogger("DataGeneratorLogger")
__NAMES = __UTIL.load_base("names")
__ADDRESS = __UTIL.load_base("address")
__CITIES = __UTIL.load_base("city")


def generate(cpf_lines):
    # Generate `cpf_lines` CSV test records, alternating valid (even index)
    # and invalid (odd index) CPF documents.
    # NOTE(review): the visible body appears truncated — the odd branch never
    # sets parent_cpf and no data row is written; the file handle opened in
    # append mode is also never closed. Verify against the full source.
    print(
        f"Starting Data Generator Application for {cpf_lines} records at {datetime.now()}"
    )
    f = open("../../../output/generated.csv", "a+")
    header = "id;name;cpf;address;city;state;parent_name;parent_document;parent_city;parent_state;"
    f.write(header)
    for index in range(cpf_lines):
        if (index % 2 == 0):
            # Even rows get a syntactically valid CPF for child and parent.
            cpf = __UTIL.generate_valid_cpf()
            parent_cpf = __UTIL.generate_valid_cpf()
        else:
            # Odd rows get an invalid CPF (negative test data).
            cpf = __UTIL.generate_invalid_cpf()
def wrapper(subjuct, curpos, cap): datas = [{'id': cap}] res = checkRule.check(datas, 'id') if not res: raise ParseException, 'check_ID ' + Util.value2pystr(cap)