def hill(self):
    """Run plain hill-climbing over N-queens states for up to self.iterate
    rounds, stopping early once a zero-attack (solution) state is reached.

    Side effects: writes the final board via the project `file` module to
    ./resource/newBoard.txt, prints a Portuguese summary, and updates
    self.startState to the final state.

    Returns the number of attacking queen pairs in the final state.
    """
    currentState = self.startState
    nextEval = Heuristic(currentState).attacks()  # attacks in best state so far
    i = 0
    while i < self.iterate and nextEval != 0:
        newState = self.neighbor.generateState()
        currentEval = Heuristic(newState).attacks()
        if self.update_states:
            # Optional per-iteration trace of candidate moves.
            print Heuristic(currentState).queensPosition(), " -> ", Heuristic(newState).queensPosition()
        if currentEval <= nextEval:
            # Accept the neighbour when it is no worse (sideways moves allowed).
            currentState = newState
            nextEval = Heuristic(currentState).attacks()
        i += 1
        # NOTE(review): source formatting was collapsed; the neighbour generator
        # appears to be re-seeded from the current state every iteration — confirm.
        self.neighbor = Neighbor(currentState)
    file.write(Heuristic(currentState).queensPosition(),
               self.neighbor.createBoard(),
               url="./resource/newBoard.txt")
    print "Hill Comum > Iteracao : ", i
    print "Posicao Inicial das ", len(self.startState), " rainhas : ", Heuristic(self.startState).queensPosition()
    print "Posicao Final das ", len(self.startState), " rainhas : ", Heuristic(currentState).queensPosition()
    print "\tNumero de rainhas atacando : ", Heuristic(currentState).attacks()
    self.startState = currentState
    return Heuristic(currentState).attacks()
def start(self): """how does shout start up. first configuration (if not supplied). tests and checks. then finally starts""" #os.system('clear') msg = """ Estos son los elementos básicos para transmitir. Si estás contentx con esta configuración puedes empezar a transmitir con la tecla <enter>. Si no, puedes editar la configuración tecleando el número o letra que corresponda a la información que quieras cambiar: """ options = [] for i in self.basic_opts: ice = self.config['icecast2-0'] if ice.has_key(i): options.append((i, ice[i])) from interface import menu footer = """ P) Para regresar todos los campos a los valores (P)redeterminados O) Otras (O)pciones S) (S)alir intro|enter) Iniciar transmission ______________________""" bmenu = menu(options, 'configuración', msg, footer) opt = bmenu.display() lopt = opt.lower() if lopt == 's': bmenu.bye_bye() elif lopt == 'o': self.options_menu() elif lopt == 'o': self.config = self.defaults self.start() elif opt == '': file.write(self.config, self.cpath) if not self.get_depends(): self.start_app('darkice') else: os.system('clear') print 'dependencias' for d in self.get_depends(): print 'nuestra configuración depende de: %s' % d print 'vamos asegurar que estan corriendo %s' % d if self.is_not_running(d): print """ %s no esta corriendo, intentaremos arrancarlo en el caso de alguna falla intenta reiniciarlo manualmente""" % d if d == 'jackd': os.system('jack.ctl stop') os.system('jack.ctl start') sleep(2) elif d == 'icecast2': os.system('sudo /etc/init.d/icecast2 restart') sleep(2) else: print "%s esta corriendo, avanzando" % d self.start_app('darkice') self.start() else: self.catch_options(options, opt) self.start()
def simulate(self):
    """Simulated annealing for N-queens.

    Outer loop runs up to self.iterate temperature steps (early exit once an
    inner pass records zero accepted moves); the inner loop accepts up to
    self.maxSuc improving/probabilistic moves out of at most self.maxDis
    tries.  Every zero-attack state encountered is collected in `solutions`.

    Side effects: writes the final board via the project `file` module,
    prints a Portuguese summary, and returns the final attack count.
    """
    i = 0
    success = sys.maxsize  # sentinel so the outer loop runs at least once
    currentState = self.startState
    t = self.startTemp  # current annealing temperature
    solutions = []      # distinct solved positions seen along the way
    while not (success == 0) and i < self.iterate:
        j = 0
        success = 0
        while success <= self.maxSuc and j < self.maxDis:
            f1 = Heuristic(currentState).attacks()
            newState = self.neighbor.generateState()
            if Heuristic(newState).attacks() == 0:
                # Record every distinct solution, even if not accepted below.
                if not Heuristic(newState).queensPosition() in solutions:
                    solutions.append(Heuristic(newState).queensPosition())
            if self.state_update:
                print Heuristic(currentState).queensPosition(), " -> ", Heuristic(newState).queensPosition()
            f2 = Heuristic(newState).attacks()
            deltaF = f2 - f1
            if not t == 0.0:
                # Metropolis criterion: always accept improvements, sometimes
                # accept worse states with probability exp(-deltaF / t).
                if (deltaF <= 0) or (exp(-deltaF / t) > random.random()):
                    currentState = newState
                    success += 1
            j += 1
        self.neighbor = Neighbor(currentState)
        t = self.alpha * t  # geometric cooling
        i += 1
    file.write(Heuristic(currentState).queensPosition(),
               self.neighbor.createBoard(),
               url='./resource/newBoard.txt')
    print "Contagem final de sucessos : ", success
    print "Temperatura final : ", t
    print "Numero de iteracoes : ", i
    print "Posicao Inicial das ", len(self.startState), " rainhas : ", Heuristic(self.startState).queensPosition()
    print "Posicao Final das ", len(self.startState), " rainhas : ", Heuristic(currentState).queensPosition()
    print "\tNumero de rainhas atacando : ", Heuristic(currentState).attacks()
    print "Solucoes encontradas: "
    for solution in solutions:
        print solution
    return Heuristic(currentState).attacks()
def write():
    """Persist the current session state via the project `file` module.

    Forwards the module-level configuration globals to file.write; declares
    them `global` for clarity even though they are only read here.
    """
    global fileName, username, password, hostname, ip, files, dirs, \
        crackSecure, sysColor0, sysColor1
    file.write(fileName, username, password, hostname, ip, dirs, files,
               crackSecure, sysColor0, sysColor1)
def revision_latest_time(stream_info, stream_code_path):
    """Return "<revision>,<time>,<author>\n" for the latest commit of an
    svn or git working copy at stream_code_path.

    The three fields are seeded with Chinese column labels (提交人/版本号/提交时间,
    i.e. author/revision/commit-time) and the commit data is appended to them.
    Returns just "\n" when nothing could be determined.  For git, the process
    chdirs into the repo and back to the module-level `current_path`.
    """
    command = ""
    latest_info = ""
    user_name = '提交人'      # label prefix: committer
    revision = '版本号'       # label prefix: revision id
    latest_time = '提交时间'  # label prefix: commit time
    try:
        if stream_info["SCM_TYPE"] == "svn":
            command = ' svn info --xml '+stream_code_path
        elif stream_info["SCM_TYPE"] == "git":
            if not os.path.exists(stream_code_path):
                return latest_info
            os.chdir(stream_code_path)
            # author->short-hash->iso-date for the most recent commit only
            command = "git log --pretty=format:\"%an->%h->%ad\" --date=iso -1"
        p = subprocess.Popen(command, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT, shell=True)
        if stream_info["SCM_TYPE"] == "svn":
            message = ''
            file_svn_info_xml = stream_code_path+'/file_svn_info.xml'
            for line in p.stdout:
                message += line.decode().strip() + '\n'
            # "url" only appears in well-formed `svn info --xml` output.
            if "url" in message:
                with open(file_svn_info_xml, 'w') as file:
                    file.write(message)
            if os.path.isfile(file_svn_info_xml):
                tree = ET.ElementTree(file=file_svn_info_xml)
                for elem in tree.iter():
                    if "commit" == elem.tag:
                        revision += elem.attrib['revision']
                    elif "author" == elem.tag:
                        user_name += elem.text
                    elif "date" == elem.tag:
                        latest_time += elem.text
                latest_info = revision+','+latest_time+','+user_name
            if os.path.exists(file_svn_info_xml):
                os.remove(file_svn_info_xml)  # temp file, always cleaned up
        elif stream_info["SCM_TYPE"] == "git":
            for line in p.stdout:
                line = line.decode().strip()
                if "->" in line:
                    msg_array = line.split('->')
                    if len(msg_array) == 3:
                        user_name += msg_array[0]
                        revision += msg_array[1]
                        latest_time += msg_array[2]
                        latest_info = revision+','+latest_time+','+user_name
            os.chdir(current_path)  # module-level: restore original cwd
    except Exception as e:
        # NOTE(review): re-wrapping loses the original exception type.
        raise Exception(e)
    return latest_info+'\n'
def cleanup(textfile):
    """Filter a results text file in place, keeping only lines that pass all
    `findthing` checks (':', '[', ']', '"') and stripping result markers
    ('valid'/'invalid', '[True]' etc.) from the survivors.

    Relies on the project `file` module for read/openforwrite/write/
    closeafterwrite.  Prints a verbose trace of everything it does.
    """
    print('')
    print('Reading ' + textfile + '...')
    txt = file.read(textfile, 0)
    i = 0
    print('Found ' + str(len(txt)) + ' items:')
    while i < len(txt):
        print(txt[i])
        i += 1
    i = 0
    print('')
    print('Finding problems...')
    print('')
    newnewtxt = ['']  # dummy head so index 0 can be deleted afterwards
    while i < len(txt):
        # A line is kept only if every findthing() check passes.
        test = findthing(txt[i], ':')
        test = test and findthing(txt[i], '[')
        test = test and findthing(txt[i], ']')
        test = test and findthing(txt[i], '"')
        if test:
            print('No problem found with ' + txt[i])
            newnewtxt.append(txt[i])
        elif not test:
            # Problem lines are simply dropped (not copied to newnewtxt).
            print('Problem found with ' + txt[i] + '. Cleaning...')
        i += 1
    del newnewtxt[0]  # drop the dummy head
    print('Checking complete.')
    print('')
    print('Opening for write...')
    newtxt = file.openforwrite(textfile)
    i = 0
    print('')
    print('New file being created:')
    print('')
    while i < len(newnewtxt):
        # Strip result markers before rewriting each surviving line.
        string = newnewtxt[i]
        string = string.replace('invalid', '')
        string = string.replace('valid', '')
        string = string.replace('[True]', '')
        string = string.replace('[False]', '')
        string = string.replace('[Valid]', '')
        string = string.replace('[Invalid]', '')
        string = string.rstrip()
        print(string)
        file.write(newtxt, string)
        i += 1
    print('')
    print('Done')
    file.closeafterwrite(newtxt)
    return
def generateState(self): pChange = [] # Posicao de mudanca das rainhas # nChange = len(self.state) / random.randint(1, len(self.state) - 1) # numero de linhas que mudarao nChange = (len(self.state)/2) - 1 # numero de linhas que mudarao # nChange = (len(self.state)/2) # numero de linhas que mudarao for col in range(nChange): pChange.append(random.randint(0, len(self.state) - 1)) for col in range(len(pChange)): self.qP[pChange[col]] = random.randint(0, len(self.state) - 1) self.createBoard() file.write(self.qP, self.baseBoard) return file.read('./resource/newBoard.txt')
def update_pylint_config(properties_info):
    """Append checker settings to an existing pylint config file.

    Writes an `enable=` line built from OPEN_CHECKERS (';' -> ','), and one
    `key=value` line per entry of the doubly-JSON-encoded
    CHECKER_OPTIONS['blank_rules'] mapping.  Re-raises any failure wrapped
    in a plain Exception.
    """
    try:
        with open(properties_info["CONFIG_PATH"], "a", encoding='utf-8') as cfg:
            if 'OPEN_CHECKERS' in properties_info:
                enabled = properties_info['OPEN_CHECKERS'].replace(';', ',')
                cfg.write('enable=' + enabled + '\n')
            # 'blank_rules' is looked up as a substring of the raw JSON text.
            has_blank_rules = ('CHECKER_OPTIONS' in properties_info
                               and 'blank_rules' in properties_info['CHECKER_OPTIONS'])
            if has_blank_rules:
                outer = json.loads(properties_info['CHECKER_OPTIONS'])
                for name, value in json.loads(outer['blank_rules']).items():
                    cfg.write(name + '=' + value + '\n')
    except Exception as e:
        raise Exception(e)
def update_spotbugs_config(properties_info):
    """Rewrite the spotbugs filter XML from the OPEN_CHECKERS list.

    Produces one <Match><Bug pattern="..."/></Match> per non-empty checker;
    does nothing at all when OPEN_CHECKERS is absent.
    """
    if 'OPEN_CHECKERS' not in properties_info:
        return
    lines = ['<?xml version="1.0" encoding="UTF-8"?>\n', '<FindBugsFilter>\n']
    for checker in properties_info['OPEN_CHECKERS'].split(';'):
        if checker:  # skip the empty fragments a trailing/double ';' produces
            lines.append('<Match><Bug pattern="' + checker + '" /></Match>\n')
    lines.append('</FindBugsFilter>\n')
    with open(properties_info["CONFIG_PATH"], 'w') as out:
        out.writelines(lines)
def build_json(action, cmd, str):
    """Serialize an action payload to Linus's fixed output file.

    Parameters:
        action: title of the action.
        cmd:    command string echoed back in the payload.
        str:    optional text body (NOTE: parameter name shadows the builtin;
                kept for interface compatibility).

    BUG FIX: the original did `Text.append(Text.append({...}))`, which
    appended the payload dict and then the inner append's None return value,
    producing a spurious null in the JSON.  Also switched to a context
    manager so the file handle is closed even on error.
    """
    order = []
    Text = []
    if str != "":
        order.append(3)
        Text.append({"data": str})
    data = {
        'title': action,
        'order': order,
        'Image': [],
        'URI': [],
        'Text': Text,
        'command': cmd,
    }
    with open('/home/iosdev747/Desktop/Linus/Linus/output.txt', 'w') as out:
        out.write(json.dumps(data))
def main():
    """Log in (retrying with re-entered credentials up to four attempts),
    fetch the first page, and save its HTML to index.html.

    NOTE(review): source was collapsed, so the exact nesting of the retry
    `if session == 0` blocks is inferred — each retry is nested inside the
    previous failure.  Confirm against the original formatting.
    """
    username, password = Login()
    session = getCookies(username, password)
    if session == 0:
        # First failure: retry with a re-typed password.
        password = rePasswd()
        session = getCookies(username, password)
        if session == 0:
            # Second failure: re-enter both credentials.
            username, password = Login()
            session = getCookies(username, password)
            if session == 0:
                # Third failure: one last password retry.
                password = rePasswd()
                session = getCookies(username, password)
                if session == 0:
                    print 'Sorry GoodBye~'
                    sys.exit(0)
    content, who = PageOne(session)
    # Save file to index.html
    file = open('index.html', 'w')
    file.write(content)
    file.close()
def update_eslint_config(config_file_path, checkers_list, checker_options):
    """Synchronise an eslint rules file with the enabled checker set.

    Rules present in the file but absent from checkers_list are set to 'off'.
    Each entry of checker_options (rule -> JSON-encoded {name: value}) has its
    last value coerced (digits -> int, else str), optionally wrapped via the
    per-rule template table (currently only 'max-len'), and stored as
    ["error", value].  The result is written back as a JS module:
    ``module.exports ={...};``.  Failures are re-raised wrapped in Exception.
    """
    try:
        with open(config_file_path, 'r') as src:
            config_data = json.load(src)
        rules = config_data['rules']
        # Turn off every rule that is not in the enabled list.
        for rule_name in [r for r in rules.keys() if r not in checkers_list]:
            rules[rule_name] = 'off'
        # Rules whose scalar option must be wrapped in an object template.
        wrapped = {'max-len': '{ "code": xxx }'}
        for rule_name, raw_option in checker_options.items():
            value = ''
            for candidate in json.loads(raw_option).values():
                value = candidate  # keep only the last value, as before
            value = int(value) if str(value).isdigit() else str(value)
            if rule_name in wrapped:
                value = json.loads(wrapped[rule_name].replace('xxx', str(value)))
            rules[rule_name] = ["error", value]
        with open(config_file_path, 'w') as dst:
            dst.write('module.exports =')
            json.dump(config_data, dst, sort_keys=True, indent=4,
                      separators=(',', ':'))
            dst.write(';')
    except Exception as e:
        raise Exception(e)
async def runcode(ctx):
    """Discord command: collect a user-supplied script (message body or
    attachment), wrap it into an async function with error trapping, write
    it to file.py and schedule it for execution via run().

    WARNING: this executes arbitrary user code — by design, but dangerous.
    """
    def check(msg):
        # Only accept the reply from the same user in the same channel.
        return msg.author == ctx.author and msg.channel == ctx.channel
    await ctx.send(
        "```Enter the entire code you want to test: (discord library and extensions are available by default. Import any other installed library you might need.)```"
    )
    try:
        # Wait up to five minutes for the follow-up message.
        scriptres = await client.wait_for("message", check=check, timeout=300.0)
    except asyncio.TimeoutError:
        await ctx.send("`TimeOut`")
        return
    if len(scriptres.attachments) != 0:
        # Prefer an attached file over the message body.
        script = requests.get(scriptres.attachments[0].url).text
    else:
        script = scriptres.content
    scriptlines = script.split("\n")
    # NOTE(review): the indentation embedded in these generated-code literals
    # was lost when the source was collapsed; 4/8 spaces assumed — confirm.
    scriptfunc = "import discord\nfrom discord.ext import commands\nasync def script(ctx, client):\n    try:\n"
    for line in scriptlines:
        scriptfunc = scriptfunc + "        " + line + "\n"
    scriptfunc = scriptfunc + "    except Exception as err:\n        await ctx.send(str(err))"
    with open('file.py', 'w') as file:
        file.write(scriptfunc)
    await ctx.send("```Running :```\n")
    asyncio.create_task(run(ctx, client))
def main(argv):
    """Read names from argv[0], sort them, and write them to argv[1].

    argv: [input_path, output_path, optional direction] — a third argument
    equal to 'desc' selects descending order, anything else keeps 'asc'.
    Exits with status 0 after printing a message when arguments are missing.
    """
    if len(argv) < 2:
        print('Please enter input and output files as arguments!')
        sys.exit(0)
    input_path, output_path = argv[0], argv[1]
    # Default to ascending unless the caller explicitly asked for 'desc'.
    direction = 'desc' if len(argv) == 3 and argv[2] == 'desc' else 'asc'
    name_list = file.read(input_path)
    sorted_name_list = sort_names.sort_by_len_alpha(name_list, direction)
    file.write(output_path, sorted_name_list)
def update_checkstyle_config(properties_info):
    """Prune and re-parameterise a checkstyle XML config in place.

    Checker names are normalised first: fully-qualified 'com.tencent.checks.*'
    names are kept as-is, any other '*Check' class name is reduced to its
    short form without the 'Check' suffix.  Modules not in the enabled list
    are removed (both top-level and TreeWalker children); enabled modules
    get their <property> values updated from CHECKER_OPTIONS, with the
    special 'tokens' key expanding into one property element per token.
    Finally the tree is rewritten with the checkstyle 1.3 DOCTYPE header.
    """
    try:
        tree = ET.ElementTree(file=properties_info["CONFIG_PATH"])
        root = tree.getroot()
        checker_style = ['com.tencent.checks.']  # NOTE(review): unused local
        if 'OPEN_CHECKERS' in properties_info:
            checkers_list = properties_info['OPEN_CHECKERS'].split(';')
            open_checkers_list = []
            for checker in checkers_list:
                if re.search("^com.tencent.checks.", checker):
                    open_checkers_list.append(checker)
                    continue
                short_checker = checker.rsplit('.', 1)[1]
                if short_checker.endswith('Check'):
                    open_checkers_list.append(short_checker[:-5])
            checkers_list = open_checkers_list
            # delete skip_checkers: drop every module not in the enabled list
            for module in root.findall('module'):
                if module.attrib['name'] != 'TreeWalker' and not module.attrib[
                        'name'] in checkers_list:
                    root.remove(module)
                elif module.attrib['name'] == 'TreeWalker':
                    for tree_walker_module in module.findall('module'):
                        if not tree_walker_module.attrib[
                                'name'] in checkers_list:
                            module.remove(tree_walker_module)
            # update checker option
            if 'CHECKER_OPTIONS' in properties_info:
                change_checker_options = {}
                checker_options = json.loads(
                    properties_info['CHECKER_OPTIONS'])
                # Normalise the option keys exactly like the checker names.
                for key in checker_options.keys():
                    if re.search("^com.tencent.checks.", key):
                        change_checker_options[key] = checker_options[key]
                        continue
                    short_checker = key.rsplit('.', 1)[1]
                    if short_checker.endswith('Check'):
                        change_checker_options[
                            short_checker[:-5]] = checker_options[key]
                checker_options = change_checker_options
                for module in root.findall('module'):
                    if module.attrib['name'] != 'TreeWalker' and module.attrib[
                            'name'] in checkers_list and module.attrib[
                                'name'] in checker_options:
                        option_list = json.loads(
                            checker_options[module.attrib['name']])
                        keys = option_list.keys()
                        for key in keys:
                            if key == 'tokens':
                                # one <property> element per token value
                                for token in option_list[key]:
                                    ET.SubElement(module, 'property',
                                                  attrib={
                                                      'name': key,
                                                      'value': token
                                                  })
                            else:
                                # substring match on the property name, as before
                                for property in module.findall('property'):
                                    if key in property.attrib['name']:
                                        property.attrib['value'] = option_list[
                                            key]
                    elif module.attrib['name'] == 'TreeWalker':
                        for tree_walker_module in module.findall('module'):
                            if tree_walker_module.attrib[
                                    'name'] in checkers_list and tree_walker_module.attrib[
                                        'name'] in checker_options:
                                option_list = json.loads(checker_options[
                                    tree_walker_module.attrib['name']])
                                keys = option_list.keys()
                                for key in keys:
                                    if key == 'tokens':
                                        for token in option_list[key]:
                                            ET.SubElement(tree_walker_module,
                                                          'property',
                                                          attrib={
                                                              'name': key,
                                                              'value': token
                                                          })
                                    else:
                                        for property in tree_walker_module.findall(
                                                'property'):
                                            if key in property.attrib['name']:
                                                property.attrib[
                                                    'value'] = option_list[key]
        with open(properties_info["CONFIG_PATH"], 'wb') as file:
            # Re-emit the XML declaration + checkstyle DOCTYPE by hand, since
            # ElementTree does not write a DOCTYPE itself.
            file.write(
                '<?xml version="1.0" encoding="UTF-8" ?><!DOCTYPE module PUBLIC "-//Checkstyle//DTD Checkstyle Configuration 1.3//EN" "https://checkstyle.org/dtds/configuration_1_3.dtd">'
                .encode('utf8'))
            tree.write(file, 'utf-8')
    except Exception as e:
        raise Exception(e)
def check():
    """Run `go build` and gometalinter over the configured code stream and
    collect warnings into PROJECT_GOML_JSON.

    Reads everything from the module-level `stream_info` dict; sets the
    module-level `go_build_status` to 'false' when the build reported
    missing GOPATH packages, which additionally disables the linters in
    `go_build_faild_skip_linter`.  Exits the process when required paths
    are missing.
    """
    global go_build_status
    stream_code_path = ""
    project_goml_json = ""
    skip_paths = ""                  # accumulated --skip= filter arguments
    default_disable_linter = ""      # linters disabled by default
    build_failed_disable_linter = "" # linters disabled when the build failed
    stream_result_path = ""
    current_path = sys.path[0]
    scan_path = ""
    go_path = ""
    bug_data_list = []
    go_build_message = ""
    checkers_options = ''
    scan_finish_message = ''
    if 'STREAM_CODE_PATH' in stream_info:
        stream_code_path = stream_info['STREAM_CODE_PATH']
    if 'PROJECT_GOML_JSON' in stream_info:
        project_goml_json = stream_info['PROJECT_GOML_JSON']
    for linter in default_skip_linter:
        default_disable_linter += " --disable=" + linter + " "
    if 'STREAM_RESULT_PATH' in stream_info:
        stream_result_path = stream_info['STREAM_RESULT_PATH']
    if "GOROOT" in stream_info:
        os.environ["GOROOT"] = stream_info['GOROOT']
    if "GO15VENDOREXPERIMENT" in stream_info:
        os.environ["GO15VENDOREXPERIMENT"] = stream_info[
            'GO15VENDOREXPERIMENT']
    if 'SUB_CODE_PATH_LIST' in stream_info and stream_info[
            'SUB_CODE_PATH_LIST'] != '':
        # Everything outside the selected sub-paths is added to SKIP_PATHS.
        sub_code_path_list = stream_info['SUB_CODE_PATH_LIST'].split(',')
        sub_path_list = [
            ''.join(stream_code_path + '/' + path).replace('//', '/')
            for path in sub_code_path_list
        ]
        find_path = stream_code_path
        stream_info['SKIP_PATHS'] += util.add_skip_path(
            '', stream_code_path, find_path, sub_path_list)
    if "SKIP_PATHS" in stream_info:
        skip_path_list = stream_info['SKIP_PATHS'].split(';')
        for skip_path in skip_path_list:
            # Strip regex-ish wildcards; gometalinter takes plain dir names.
            skip_path = skip_path.replace(".*/", '').replace("/.*", '').replace(
                ".*", '').replace("*", '')
            if skip_path.replace(' ', '') == "":
                continue
            if re.search("^src/", skip_path):
                skip_path = skip_path[4:]
            skip_paths += " --skip=\"" + skip_path + "\" "
    if 'CHECKER_OPTIONS' in stream_info and stream_info[
            'CHECKER_OPTIONS'] != '':
        # CHECKER_OPTIONS is JSON of JSON: each value is itself a JSON object
        # whose key/value pairs become --key=value linter flags.
        checker_options = json.loads(stream_info['CHECKER_OPTIONS'])
        for checker_option in checker_options.values():
            checker_option = json.loads(checker_option)
            keys = checker_option.keys()
            for key in keys:
                checkers_options += ' --' + key + '=' + checker_option[key]
    if stream_code_path == '' or project_goml_json == '' or stream_result_path == '':
        print('below option is empty!')
        print('stream_code_path: ' + stream_code_path)
        print('project_goml_json: ' + project_goml_json)
        print('stream_result_path: ' + stream_result_path)
        exit(1)
    go_path = stream_code_path
    workspace = stream_code_path
    if "REL_PATH" in stream_info and stream_info['REL_PATH'] != '':
        go_path = ''.join(go_path.replace(stream_info['REL_PATH'], ''))
        workspace = go_path
    if "GO_PATH" in stream_info and stream_info['GO_PATH'] != '':
        # Extra GOPATH entries relative to the workspace, ';'-separated.
        rel_go_path_list = stream_info['GO_PATH'].split(';')
        for rel_go_path in rel_go_path_list:
            if os.path.exists(workspace + '/' + rel_go_path):
                go_path += os.pathsep + workspace + '/' + rel_go_path
    os.environ["GOPATH"] = go_path
    os.chdir(stream_code_path)
    print('GOPATH: ' + go_path)
    command = "go build ./..."
    go_build_p = subprocess.Popen(command,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True,
                                  start_new_session=True,
                                  env=dict(os.environ, LANG="C", LC_ALL="C"))
    try:
        pid_config.add_pid(str(go_build_p.pid), stream_info["PID_FILE"])
        for line in go_build_p.stdout:
            line = str(line.decode('utf-8'))
            # Only GOPATH-related build errors are interesting here.
            if "" != line and 'GOPATH' in line:
                go_build_message += line.replace(workspace,
                                                 '$WORKSPACE').replace(
                                                     '(from $GOPATH)', '')
    finally:
        go_build_p.terminate()
        go_build_p.wait()
    if "WORKSPACE" in go_build_message:
        scan_finish_message += "Please check your GOPATH para in CodeCC. If you don't upload all golang dependent libraries to svn/git, please ignore this warning. \nCannot find below package: \n" + go_build_message
        print(scan_finish_message)
        if 'STREAM_DATA_PATH' in stream_info and os.path.exists(
                stream_info['STREAM_DATA_PATH']):
            with open(stream_info['STREAM_DATA_PATH'] + '/go_build.log',
                      "w",
                      encoding='utf-8') as go_build_file:
                go_build_file.write(scan_finish_message)
        go_build_status = 'false'
    if 'false' == go_build_status:
        # When the build failed, additionally disable build-sensitive linters.
        for linter in go_build_faild_skip_linter:
            build_failed_disable_linter += " --disable=" + linter + " "
    #codecc_web.upload_goml_project_dir_struct_checker(stream_info['TOOL_TYPE'].upper(), 'true', 'true')
    print("go gometalinter ./...")
    command = "gometalinter ./... --sort=path --deadline=60m --json --enable-all " + checkers_options + default_disable_linter + " " + build_failed_disable_linter + " " + skip_paths + " --exclude=vendor -j 2"
    goml_p = subprocess.Popen(command,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              shell=True,
                              start_new_session=True)
    try:
        pid_config.add_pid(str(goml_p.pid), stream_info["PID_FILE"])
        for line in goml_p.stdout:
            line = str(line.decode('utf-8'))
            # Skip lines that are not warnings.
            if not 'severity' in line:
                continue
            result = json.loads(line.replace('},', '}'))
            if 'vet' == result['linter']:
                result['message'] = 'vet/vet->' + result['message']
            if 'gas' == result['linter']:
                result['message'] = result['message'].replace(',xxx', '')
            if build_error_check(result, stream_info):
                continue
            #print(util.get_datetime()+" "+line)
            bug_data_list.append(result)
        with open(project_goml_json, "a+", encoding='utf-8') as file:
            if len(bug_data_list) > 0:
                file.write(json.dumps(bug_data_list))
    finally:
        goml_p.terminate()
        goml_p.wait()
    os.chdir(current_path)
    parse_project_goml_json_file_list(stream_info)
    parse_project_goml_json_file_error(stream_info)
def save(self, name):
    """Serialize the current matrix as a PBM image named ``<name>.pbm``.

    Caches the generated PBM payload on self.pbm and delegates the actual
    disk write to the project-level File helper.
    """
    target = name + '.pbm'
    raw = self.getMatrix().getMatrixData()
    self.pbm = self.matrixToPbm(raw)
    File.write(self.pbm, target)
import file

# Demo script for the project-local `file` module: read, overwrite, append.
# NOTE(review): the filename contains a literal space ('file Test.txt').
content = file.read('file Test.txt')
print(content)
# Overwrite the file with its own content plus a footer section.
file.write('file Test.txt', content + '\n\nFooter')
content = file.read('file Test.txt')
print(content)
# Append a legal notice and show the final result.
file.append('file Test.txt', '\nLegal|Copyright')
content = file.read('file Test.txt')
print(content)
def generate_cubes():
    """Cut each patient's MRI volumes into overlapping training cubes and
    pickle cubes whose ground-truth tumour voxel count exceeds Threshold
    (Python 2: xrange / file() / integer division are relied upon).

    Reads module-level config (plen, offset, mask, paths, bratsSeqs,
    bratsChannels, numPred, Threshold); writes one data/truth pickle pair
    per patient under DATA_PATH/<dirName> and a per-patient summary line to
    output.txt.

    BUG FIX: the final `fh.close` was missing its call parentheses, so the
    debug log handle was never actually closed/flushed.

    NOTE(review): the source formatting was collapsed; the nesting of the
    cube-extraction section relative to the per-sequence loop is inferred.
    """
    dirName = 'p' + str(plen) + 'o' + str(offset) + 'm' + str(mask)
    fh = open("output.txt", "a")  # Output file for debugging
    if not os.path.exists(DATA_PATH + '/' + dirName):
        os.makedirs(DATA_PATH + '/' + dirName)
    # Read patients list
    patients = os.listdir(IM_PATH)
    for p in sorted(patients):
        truthf = os.listdir(TRUTH_PATH + '/' + p)
        truth = mha.new(truthf)
        truth = truth.data
        shape = numpy.array(truth.size)
        # numpred = plen - offset + 1; crop so the volume tiles exactly.
        numCubes = (shape - 2*(offset/2))/numPred
        newShape = 2*(offset/2) + numCubes*numPred
        cutOff = shape - newShape
        truthCrop = truth[cutOff[0]/2:cutOff[0]/2 + newShape[0],
                          cutOff[1]/2:cutOff[1]/2 + newShape[1],
                          cutOff[2]/2:cutOff[2]/2 + newShape[2]]
        SEQ_PATH = IM_PATH + '/' + p
        # Read sequences list
        files = os.listdir(SEQ_PATH)
        for i in range(bratsSeqs):
            f = sorted(files)[i]
            path = os.path.join(SEQ_PATH, f)
            if os.path.isfile(path):
                data = numpy.zeros(
                    [bratsChannels, newShape[0], newShape[1], newShape[2]],
                    dtype='float32')
                # NOTE(review): `f` is a filename string here, yet .get_data()
                # is called on it — this looks wrong in the original; likely a
                # nib.load(path) is missing.  Preserved as-is.
                data[i, :, :, :] = f.get_data()[
                    cutOff[0]/2:cutOff[0]/2 + newShape[0],
                    cutOff[1]/2:cutOff[1]/2 + newShape[1],
                    cutOff[2]/2:cutOff[2]/2 + newShape[2]]
        data = data.astype('float32')
        dataList = []
        truthList = []
        # Slide a plen-sized window in numPred steps over the cropped volume.
        for ix in xrange(numCubes[0]):
            startx = ix*numPred
            endx = startx + numPred + 2*(offset/2)
            for iy in xrange(numCubes[1]):
                starty = iy*numPred
                endy = starty + numPred + 2*(offset/2)
                for iz in xrange(numCubes[2]):
                    startz = iz*numPred
                    endz = startz + numPred + 2*(offset/2)
                    tumourVoxels = numpy.array(
                        truthCrop[startx+offset/2:endx-offset/2,
                                  starty+offset/2:endy-offset/2,
                                  startz+offset/2:endz-offset/2])
                    unique, counts = np.unique(tumourVoxels,
                                               return_counts=True)
                    z = numpy.asarray((counts)).T
                    sum = numpy.sum(z)
                    if (sum > Threshold):
                        # Keep only cubes with enough labelled voxels.
                        dataList.append(
                            data[:, startx:endx, starty:endy, startz:endz])
                        truthList.append(tumourVoxels)
        data_file = file(DATA_PATH + '/' + dirName + '/' + p + '.pkl', 'wb')
        truth_file = file(DATA_PATH + '/' + dirName + '/' + p + '_truth.pkl',
                          'wb')
        cPickle.dump(numpy.array(dataList), data_file,
                     cPickle.HIGHEST_PROTOCOL)
        cPickle.dump(numpy.array(truthList), truth_file,
                     cPickle.HIGHEST_PROTOCOL)
        data_file.close()
        truth_file.close()
        u = 'Pat ' + p + ' had ' + str(sum) + ' tumour cubes\n'
        fh.write(u)
    fh.close()  # was `fh.close` (attribute access, no call)
# Combine the active project stage configuration settings # with the default `default.json` configuration settings via # via `.env.json`. We'll use this file as our base source of truth # for generating other configuration file types (.yaml, .env, etc.) default_settings_path = os.path.join(settingsdir, "default.json") default_settings_str = sfile.get(default_settings_path) default_settings_json = json.loads(default_settings_str) stage_settings_path = os.path.join(settingsdir, "{}.json".format(stage)) stage_settings_str = sfile.get(stage_settings_path) stage_settings_json = json.loads(stage_settings_str) settings_json = jsonmerge.merge(default_settings_json, stage_settings_json) settings_json_str = json.dumps(settings_json, indent=4, sort_keys=True) sfile.write(".env.json", settings_json_str) # Create `.env.yaml` settings_yaml = yaml.load(settings_json_str, Loader=yaml.SafeLoader) settings_yaml_str = yaml.dump(settings_yaml) sfile.write(".env.yaml", settings_yaml_str) # Create `.env` settings_env_str = senv.json2env(settings_json_str) sfile.write(".env", settings_env_str) # Create `.tool-versions.env` tool_versions_path = os.path.join(rootdir, ".tool-versions") tool_versions_str = sfile.get(tool_versions_path) tool_versions_env_str = senv.toolversions2env(tool_versions_str) sfile.write(".tool-versions.env", tool_versions_env_str)
elif choose[1] == '2': delete_typeitem(list_typeofitem) elif choose[1] == '3': edit_typeitem(list_typeofitem) elif choose[1] == '4': search_typeitem(list_typeofitem) elif choose[1] == "r": choose[1] = "" break elif choose[1] == "q": choose[0] = 'q' break else: print('Please input again, your input is wrong') elif choose[0] == "q": break else: print('Please input again, your input is wrong') print("Thanks for using our app") list_item_tem, list_typeofitem = readreturn() list_item = [] for ele in list_item_tem: item = Item.from_string(ele) list_item.append(item) print("WELCOME") print("Please select the function") receive_input(list_item, list_typeofitem) write(list_item, list_typeofitem)
def translate_blame_xml(file_path_blame, txt_file_path_blame, stream_info):
    """Normalise an SCM blame dump into "line->author->date" text lines.

    svn: parses the blame XML (<entry line-number=...><author/><date/>) via
    ElementTree.  git: parses `git blame` porcelain-ish text, retrying with a
    binary read when the file is not valid UTF-8 (the two branches are
    intentionally duplicated).  Results are written to txt_file_path_blame.
    """
    try:
        if os.path.isfile(file_path_blame):
            if stream_info["SCM_TYPE"] == "svn":
                tree = ET.ElementTree(file=file_path_blame)
                with open(txt_file_path_blame, "w", encoding = 'utf-8') as file:
                    for elem in tree.iter():
                        if "entry" == elem.tag:
                            line_info = elem.attrib['line-number']
                            for subelem in elem.iter():
                                if "author" == subelem.tag:
                                    line_info = line_info +"->"+subelem.text
                                if "date" == subelem.tag:
                                    line_info = line_info +"->"+subelem.text
                            file.write(line_info+"\n")
            elif stream_info["SCM_TYPE"] == "git":
                with open(txt_file_path_blame, "w", encoding = 'utf-8') as file:
                    try:
                        with open(file_path_blame, "r", encoding='utf-8') as blame_file:
                            lines = blame_file.readlines()
                            for line in lines:
                                # Collapse whitespace and unify '-'/' +' to '+'
                                # so the timestamp's tz offset splits cleanly.
                                line = ' '.join(line.replace('-', '+').replace(' +', '+').split())
                                line = line[line.index('('):]
                                line_arrary = line.split(' ')
                                if len(line_arrary) >= 3:
                                    author = line_arrary[0][1:]
                                    num_line = ''
                                    change_time = ''
                                    if "(" in author:
                                        author = author.split('(')[0]
                                    if not '+' in line_arrary[1]:
                                        # Two-token author name: glue the second
                                        # token on and shift the field indices.
                                        author += line_arrary[1]
                                        if len(line_arrary) < 4:
                                            continue
                                        num_line = line_arrary[3][:-1]
                                        change_time = datetime.datetime.fromtimestamp(int(line_arrary[2].split('+')[0])).strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                                    else:
                                        num_line = line_arrary[2][:-1]
                                        change_time = datetime.datetime.fromtimestamp(int(line_arrary[1].split('+')[0])).strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                                    line_info = num_line + '->'+author+'->'+change_time
                                    file.write(line_info+"\n")
                    except:
                        # Fallback: same parse, but reading raw bytes when the
                        # blame output is not valid UTF-8.
                        with open(file_path_blame, "rb") as blame_file:
                            lines = blame_file.readlines()
                            for line in lines:
                                line = str(line)
                                line = ' '.join(line.replace('-', '+').replace(' +', '+').split())
                                line = line[line.index('('):]
                                line_arrary = line.split(' ')
                                if len(line_arrary) >= 3:
                                    author = line_arrary[0][1:]
                                    num_line = ''
                                    change_time = ''
                                    if "(" in author:
                                        author = author.split('(')[0]
                                    if not '+' in line_arrary[1]:
                                        author += line_arrary[1]
                                        if len(line_arrary) < 4:
                                            continue
                                        num_line = line_arrary[3][:-1]
                                        change_time = datetime.datetime.fromtimestamp(int(line_arrary[2].split('+')[0])).strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                                    else:
                                        num_line = line_arrary[2][:-1]
                                        change_time = datetime.datetime.fromtimestamp(int(line_arrary[1].split('+')[0])).strftime('%Y-%m-%dT%H:%M:%S.%f%z')
                                    line_info = num_line + '->'+author+'->'+change_time
                                    file.write(line_info+"\n")
    except Exception as e:
        raise Exception(e)
def dashboard():
    """Render the dashboard page; on POST, persist the submitted question."""
    if request.method == "POST":
        write([request.form["question"]])
    return render_template("dashboard.html")
def check():
    """Run the dupc (PMD-CPD) duplicate-code scanner per language suffix and
    merge the per-language XML outputs.

    Reads configuration from the module-level `stream_info` dict; while
    streaming the tool's XML output, each <codefragment> body is replaced by
    an md5 <fingerprint> element so raw source is never persisted.  Exits
    the process when required options are missing.
    """
    stream_code_path = ""
    project_dupc_xml = ""
    pool_processes = ""
    pid_file = ""
    skip_paths_arg = ""
    suffix_list = []
    if 'STREAM_CODE_PATH' in stream_info:
        stream_code_path = stream_info['STREAM_CODE_PATH']
    if 'PROJECT_DUPC_XML' in stream_info:
        project_dupc_xml = stream_info['PROJECT_DUPC_XML']
    if 'POOL_PROCESSES' in stream_info:
        pool_processes = stream_info['POOL_PROCESSES']
    if 'PID_FILE' in stream_info:
        pid_file = stream_info['PID_FILE']
    if 'TARGET_SUBFIXS' in stream_info:
        suffix_list = stream_info['TARGET_SUBFIXS'].split(';')
    if 'SUB_CODE_PATH_LIST' in stream_info and stream_info['SUB_CODE_PATH_LIST'] != '':
        # Everything outside the selected sub-paths is added to SKIP_PATHS.
        sub_code_path_list = stream_info['SUB_CODE_PATH_LIST'].split(',')
        sub_path_list = [''.join(stream_code_path+'/'+path).replace('//','/') for path in sub_code_path_list]
        find_path = stream_code_path
        stream_info['SKIP_PATHS'] += util.add_skip_path('', stream_code_path, find_path, sub_path_list)
    if 'SKIP_PATHS' in stream_info:
        skip_path_list = stream_info['SKIP_PATHS'].split(';')
        skip_paths_arg = get_skip_paths_arg(stream_code_path, skip_path_list)
    if stream_code_path == '' or project_dupc_xml == '' or pool_processes == '' or pid_file == '':
        print('below option is empty!')
        print('stream_code_path: '+stream_code_path)
        print('project_dupc_xml: '+project_dupc_xml)
        print('pool_processes: '+pool_processes)
        print('pid_file: '+pid_file)
        exit(1)
    current_path=sys.path[0]+'/../'
    dupc_tool_path = os.path.join(current_path, 'tools_dep/dupc/bin/run.sh')
    os.chmod(dupc_tool_path, 0o755)  # make sure the bundled tool is executable
    #print('scaning...')
    suffix_list = map_suffix_list(suffix_list)
    language_xml_list = []
    print(suffix_list)
    for suffix in suffix_list:
        command = dupc_tool_path+" cpd "+"--minimum-tokens 100 --format xml --encoding utf-8 "+ \
            " --files "+stream_code_path+" --language "+suffix+" --skip-lexical-errors"+skip_paths_arg +" 2>/dev/null"
        dupc_p = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True, start_new_session=True)
        try:
            pid_config.add_pid(str(dupc_p.pid), pid_file)
            xml_suffix = "_" + suffix + ".xml"
            language_dupc_xml = project_dupc_xml.replace(".xml", xml_suffix)
            with open(language_dupc_xml, "a+", encoding = 'utf-8') as file:
                is_codefrag = False
                codefrag = ""
                indent = ""
                for line in dupc_p.stdout:
                    line_str = str(line.decode("utf-8"))
                    # Do not write the codefragment body to the XML; replace it
                    # with an md5-based <fingerprint> element instead.
                    if "<codefragment>" in line_str and "</codefragment>" in line_str:
                        # Open and close tag on the same line: hash that line.
                        is_codefrag = False
                        codefrag = ""
                        indent = re.search(".*(?=<codefragment)", line_str).group(0)
                        finger_print = indent + "<fingerprint>" + get_md5(line_str) + "</fingerprint>\n"
                        file.write(finger_print)
                    elif "<codefragment>" in line_str:
                        # Fragment opens: start buffering until the close tag.
                        is_codefrag = True
                        codefrag += line_str
                        indent = re.search(".*(?=<codefragment)", line_str).group(0)
                    elif "</codefragment>" in line_str:
                        is_codefrag = False
                        codefrag += line_str
                        finger_print = indent + "<fingerprint>" + get_md5(codefrag) + "</fingerprint>\n"
                        codefrag = ""
                        indent = ""
                        file.write(finger_print)
                    else:
                        if is_codefrag:
                            codefrag += line_str
                        else:
                            file.write(line_str)
            language_xml_list.append(language_dupc_xml)
        finally:
            dupc_p.terminate()
            dupc_p.wait()
    os.chdir(current_path)
    merge_language_xml_files(project_dupc_xml, language_xml_list)
    parse_project_dupc_xml_file_list(project_dupc_xml, stream_info['PROJECT_FILE_LIST'])
    parse_project_dupc_xml_to_json(project_dupc_xml, stream_info['PROJECT_FILE_DUPC_JSON'])
def storeLocalCardInfo(info):
    """Append one card record to the local lookup store.

    Records are separated by '###'; fields within a record by '$$$', in the
    order: name, set, correctedName, reprint (stringified).
    """
    print(info)
    # Read the current store, build the new record, write everything back.
    existing = file.read(STORED_LOOk_UPS)
    record = '$$$'.join([info['name'], info['set'], info['correctedName'], str(info['reprint'])])
    file.write(STORED_LOOk_UPS, existing + '###' + record)
def write_data(m, t, path, dot, dir, f, a):
    """Append one semicolon-separated record to 'checked_files.txt'.

    Writes the line '{path};{dir};{f};{a};{dot};{m};{t}' in append mode, so
    earlier records are preserved.

    Fixes vs. original: the explicit close() inside the `with` block was
    redundant (the context manager already closes the handle), and the
    `as file` binding shadowed the project's `file` module used elsewhere
    in this script — renamed to `log_file`.
    """
    path_save = r''  # base directory for the log; empty string -> current working dir
    out_path = os.path.join(path_save, 'checked_files.txt')
    with open(out_path, 'a+') as log_file:
        log_file.write(f'{path};{dir};{f};{a};{dot};{m};{t}\n')
#json.dump() Python内置类型序列化为json对象后写入文件 listStr = [{"city": "北京"}, {"name": "大刘"}] json.dump(listStr, open("listStr.json","w"), ensure_ascii=False) dictStr = {"city": "北京", "name": "大刘"} json.dump(dictStr, open("dictStr.json","w"), ensure_ascii=False) #json.load() 读取文件中json形式的字符串元素 转化成python类型 import json strList = json.load(open("listStr.json")) #储存 import json filename = 'file.json' with open(filename,'w') as a: json.dumps(objects) #要储存的数据,可用于存储数据的文件filename with open('dataname.json','w') as file: #用这个 file.write(json.dumps(data)) with open('dataname1.json','w') as file: #若有中文字 file.write(json.dumps(listStr,ensure_ascii=False)) ##JsonPath JsonPath 对于 JSON 来说,相当于 XPATH 对于 XML。 XPath JSONPath 描述 / $ 根节点 . @ 现行节点 / .or[] 取子节点 .. n/a 取父节点,Jsonpath未支持 // .. 就是不管位置,选择所有符合条件的条件 * * 匹配所有元素节点 @ n/a 根据属性访问,Json不支持,因为Json是个Key-value递归结构,不需要。 [] [] 迭代器标示(可以在里边做简单的迭代操作,如数组下标,根据内容选值等) | [,] 支持迭代器中做多选。 [] ?() 支持过滤操作.
def checkemailbulk():
    """Interactively validate a bulk list of email addresses from a .txt file.

    Prompts for the path of a '.txt' file (one address per line, read via the
    project ``file`` module), runs each address through the rule functions in
    ``tests``, prints per-address results, writes a results file named
    'resultsfor<name>' next to the input, and finally prints valid/invalid
    percentages.

    Side effects: creates/overwrites the results file and removes
    'tlds-alpha-by-domain.txt' from the working directory (presumably
    downloaded earlier by one of the rule functions — confirm).
    """
    # Loop until the user supplies a path ending in '.txt'.
    while True:
        print('')
        print(
            'Please input the path of the ".txt" file containing email addresses on each file.'
        )
        filename2 = str(input(' -> '))
        if filename2[-4:] == '.txt':
            break
        else:
            print('Incorrect file extension, must be ".txt". Try Again...')
    filename = filename2
    # Split the path into directory part and bare file name to build the
    # results file path in the same directory.
    fn = filename2.split('/')[-1]
    filename2 = filename2.replace(fn, '')
    resultsfilename = filename2 + "resultsfor" + fn
    f = file.openforwrite(resultsfilename)
    print('Please note that if you already have a file named "' + resultsfilename + '", it will be erased and replaced.')
    input('Enter to continue. Quit if you want.')
    print('')
    print('Opening -> ' + filename)
    print('')
    # file.read(filename, 0) returns the list of addresses — one per line.
    fileinfo = file.read(filename, 0)
    numberofemails = len(fileinfo)
    print('Found ' + str(numberofemails) + ' items -> ')
    print('')
    # Echo the numbered list of addresses back to the user.
    i = 0
    while i < len(fileinfo):
        emailnumber = i + 1
        print(' ' + str(emailnumber) + ' - ' + fileinfo[i])
        i += 1
    print('')
    print(' <- ')
    print('')
    print('Running tests...')
    print('')
    print('Starting Process...')
    emailcount = 0
    # NOTE(review): "Reslts" is a typo in the results-file header ("Results").
    file.write(f, 'Reslts for email validation:')
    print('')
    fileinforesults = []
    while emailcount < numberofemails:
        print(' Tests for <' + fileinfo[emailcount] + '> : ')
        # Start optimistic; AND in each rule's verdict below.
        fileinforesults.append(True)
        # comments() presumably strips comment syntax from the address — confirm.
        email = comments(fileinfo[emailcount])
        test = 0
        """This is where the functions (rules) are laid out (each returns Valid or Invalid): """
        tests = {
            0: testfunction,
            1: pythonparser,
            2: checkat,
            3: localpartdotcheck,
            4: checklocalpartlength,
            5: checkdomain,
            6: alphabet,
            7: checktopleveldomain,
        }
        # Run the rules in order, short-circuiting on the first failure.
        while test < len(tests):
            fileinforesults[emailcount] = fileinforesults[emailcount] and log(
                tests[test](email.lower()))
            test += 1
            if not fileinforesults[emailcount]:
                break
            else:
                pass
        """End"""
        print(
            strbool(fileinforesults[emailcount]) + ': <' + fileinfo[emailcount] + '>')
        file.write(
            f, str(fileinfo[emailcount]) + " [" + strbool(fileinforesults[emailcount]) + "]")
        emailcount += 1
    print('')
    file.closeafterwrite(f)
    print('Email Validation Finished. New file created (' + resultsfilename + ') with results inside. Program ending.')
    # Tally valid (x) and invalid (y) counts for the summary percentages.
    t = 0
    x = 0
    y = 0
    while t < len(fileinforesults):
        if fileinforesults[t]:
            x += 1
        else:
            y += 1
        t += 1
    total = x + y
    # NOTE(review): if the input file is empty, total is 0 and these two
    # divisions raise ZeroDivisionError.
    percentx = (x / total) * 100
    percenty = (y / total) * 100
    print('Number of valid: ' + str(x) + '/' + str(total) + ' OR ' + str(percentx) + '%')
    print('Number of invalid: ' + str(y) + '/' + str(total) + ' OR ' + str(percenty) + '%')
    os.remove('tlds-alpha-by-domain.txt')
    print('')
    return
import file

# Demo of the custom `file` helper module: show the file, overwrite it with a
# footer appended, show it again, append a legal notice, and show the result.
text = file.read('file Test.txt')
print(text)

file.write('file Test.txt', text + '\n\nFooter')
text = file.read('file Test.txt')
print(text)

file.append('file Test.txt', '\nLegal|Copyright')
text = file.read('file Test.txt')
print(text)
import file

# Read one integer from stdin and persist it via the project `file` helper.
# NOTE(review): file.write is called with a single argument here, while other
# scripts pass (path, content) — confirm the helper supports this form.
a = int(input())
file.write(a)