def itemConstants(itemFile, constantsFile, cwd, keywords=KW, suffix=""):
    """Generate a JASS constants library for the items declared in itemFile.

    Reads the starting item id, the item names, and the optional per-item
    totals out of the insert file, emits one ``constant integer`` per item
    (or per copy when totals are present), and writes the library to
    CONSTANTS_DIR/constantsFile.
    """
    workDir = util.find(cwd, util.root)
    srcPath = util.find(itemFile, workDir)
    outPath = os.path.join(CONSTANTS_DIR, constantsFile)
    src = open(srcPath, 'r')
    text = src.read()
    src.close()
    # Library name: file stem, title-cased, underscores dropped.
    firstLine = ("library " + itemFile.replace("_insert.j", "").title() + "Constants\n").replace("_", "")
    pieces = [firstLine, "globals\n"]
    currentId = getId.findall(text)[0]
    names = [util.name2Var(n) for n in getItemName.findall(text)]
    counts = [int(n) for n in getTotal.findall(text)]
    if counts:
        # One constant per copy: NAME_0, NAME_1, ... — ids advance per copy.
        for idx, name in enumerate(names):
            for copy in range(counts[idx]):
                pieces.append("\tconstant integer " + name + "_" + str(copy) + suffix + " = '" + currentId + "'\n")
                currentId = util.add(currentId)
    else:
        for name in names:
            pieces.append("\tconstant integer " + name + suffix + " = '" + currentId + "'\n")
            currentId = util.add(currentId)
    pieces.append("endglobals\nendlibrary")
    out = open(outPath, 'w')
    out.write("".join(pieces) + "\n")
    out.close()
def genWarpInit(readfile="warps_insert.j", headerfile="warp_init_header.j", keyword="//insert", cwd="lua"):
    """Fill the warp-init header template with Warp.create registrations.

    For every warp name in readfile (skipping special names and explicit
    exits), emits the paired entrance/exit Warp.create calls plus their
    playerDatum bookkeeping, then writes the filled template to the
    matching *_insert.j file.
    """
    cwd = util.find(cwd, util.root)
    outFile = util.find(headerfile.replace("_header", "_insert"), util.root)
    templateFile = util.find(headerfile, util.root)
    entries = util.getInsertFileData(readfile)[0]
    lines = []
    for entry in entries:
        fields = entry.split("|")
        name = fields[0]
        exitName = name + " exit"
        # Optional second field flags the warp inactive at map start.
        isActive = fields[1] if len(fields) > 1 else "true"
        if name in special or getIsExit.findall(name) != []:
            continue
        lines.append("\t\tset w = Warp.create(\"" + exitName + " to " + name + "\", " + util.name2Var(name) + "_LOC)\n")
        lines.append("\t\tset playerDatum[i].warps[" + util.name2Var(exitName) + "] = w\n")
        lines.append("\t\tset w = Warp.create(\"" + name + " to " + exitName + "\", " + util.name2Var(exitName) + "_LOC)\n")
        lines.append("\t\tset playerDatum[i].warps[" + util.name2Var(name) + "] = w\n")
        if isActive == "false":
            lines.append("\t\tset w.isActive = false\n")
    template = open(templateFile, 'r')
    text = template.read()
    template.close()
    out = open(outFile, 'w')
    out.write(text.replace("//insert", "".join(lines)) + "\n")
    out.close()
def genWarpInit(readfile="warps_insert.j", headerfile="warp_init_header.j", keyword="//insert", cwd="lua"):
    """Generate the warp initialization code from the warp name list.

    Each non-special, non-exit warp gets two Warp.create calls (entrance
    and exit directions) wired into playerDatum; warps marked "false" are
    created inactive. Output goes to the *_insert.j sibling of headerfile.
    """
    cwd = util.find(cwd, util.root)
    writefile = util.find(headerfile.replace("_header", "_insert"), util.root)
    headerfile = util.find(headerfile, util.root)
    warpNames = util.getInsertFileData(readfile)[0]
    generated = ""
    for record in warpNames:
        parts = record.split("|")
        warp = parts[0]
        exitWarp = warp + " exit"
        active = "true" if len(parts) <= 1 else parts[1]
        if warp in special or getIsExit.findall(warp) != []:
            continue
        generated += "\t\tset w = Warp.create(\"" + exitWarp + " to " + warp + "\", " + util.name2Var(warp) + "_LOC)\n"
        generated += "\t\tset playerDatum[i].warps[" + util.name2Var(exitWarp) + "] = w\n"
        generated += "\t\tset w = Warp.create(\"" + warp + " to " + exitWarp + "\", " + util.name2Var(exitWarp) + "_LOC)\n"
        generated += "\t\tset playerDatum[i].warps[" + util.name2Var(warp) + "] = w\n"
        if active == "false":
            generated += "\t\tset w.isActive = false\n"
    handle = open(headerfile, 'r')
    template = handle.read()
    handle.close()
    handle = open(writefile, 'w')
    handle.write(template.replace("//insert", generated) + "\n")
    handle.close()
def genWarpLocs(readfile="warps_insert.j", headerfile="locations_header.j", keyword="//insert", cwd="lua"):
    """Emit location globals and their getNPCLoc initializers for warps.

    Replaces //globals with the declarations and //insert with the
    initializer statements, writing the result to the matching
    *_insert.j file.
    """
    cwd = util.find(cwd, util.root)
    outFile = util.find(headerfile.replace("_header", "_insert"), util.root)
    templateFile = util.find(headerfile, util.root)
    entries = util.getInsertFileData(readfile)[0]
    declLines = []
    initLines = []
    for entry in entries:
        name = entry.split("|")[0]
        if name in special:
            continue
        locVar = util.name2Var(name + " loc")
        idVar = util.name2Var(name + " id")
        declLines.append("\tlocation " + locVar + "\n")
        initLines.append("\tset " + locVar + " = getNPCLoc(" + idVar + ")\n")
    tmpl = open(templateFile, 'r')
    text = tmpl.read()
    tmpl.close()
    text = text.replace("//globals", "".join(declLines))
    text = text.replace("//insert", "".join(initLines))
    out = open(outFile, 'w')
    out.write(text + "\n")
    out.close()
def itemConstants(itemFile, constantsFile, cwd, keywords=KW, suffix=""):
    """Write a JASS constants library for the items defined in itemFile.

    Item ids start from the id found in the source file and advance via
    util.add; when per-item totals exist, each copy gets an indexed
    constant (NAME_0, NAME_1, ...).
    """
    searchDir = util.find(cwd, util.root)
    sourcePath = util.find(itemFile, searchDir)
    targetPath = os.path.join(CONSTANTS_DIR, constantsFile)
    handle = open(sourcePath, 'r')
    contents = handle.read()
    handle.close()
    stem = itemFile.replace("_insert.j", "")
    body = ("library " + stem.title() + "Constants\n").replace("_", "")
    body += "globals\n"
    nextId = getId.findall(contents)[0]
    variables = [util.name2Var(n) for n in getItemName.findall(contents)]
    totals = [int(n) for n in getTotal.findall(contents)]
    if totals != []:
        for i in range(len(variables)):
            for copyIdx in range(totals[i]):
                body += "\tconstant integer " + variables[i] + "_" + str(copyIdx) + suffix + " = '" + nextId + "'\n"
                nextId = util.add(nextId)
    else:
        for i in range(len(variables)):
            body += "\tconstant integer " + variables[i] + suffix + " = '" + nextId + "'\n"
            nextId = util.add(nextId)
    body += "endglobals\nendlibrary"
    handle = open(targetPath, 'w')
    handle.write(body + "\n")
    handle.close()
def __init__(self):
    """Locate the LUNA segmentation data for the current host and index it.

    The 'motel' machine has the full scratch copies; everywhere else the
    local ./Segmentation_Data directory is used for both training and
    evaluation. Collects .dcm volumes and their .xml annotation files.
    """
    host = platform.node()
    if host == 'motel':
        train_path = '/local/scratch/public/sl767/LUNA/Training_Data'
        eval_path = '/local/scratch/public/sl767/LUNA/Evaluation_Data'
        self.source_path = '/local/scratch/public/sl767/LUNA/Standard_Eval/'
    else:
        train_path = './Segmentation_Data'
        eval_path = './Segmentation_Data'
        self.source_path = './Segmentation_Data'
    # Index the available DICOM volumes.
    self.training_list = ut.find('*.dcm', train_path)
    self.training_list_length = len(self.training_list)
    print('Training Data found: ' + str(self.training_list_length))
    self.eval_list = ut.find('*.dcm', eval_path)
    self.eval_list_length = len(self.eval_list)
    print('Evaluation Data found: ' + str(self.eval_list_length))
    # Index the matching XML annotation files.
    self.xml_training_list = ut.find('*.xml', train_path)
    self.xml_training_list_length = len(self.xml_training_list)
    print('XML Training Data found: ' + str(self.xml_training_list_length))
    self.xml_eval_list = ut.find('*.xml', eval_path)
    self.xml_eval_list_length = len(self.xml_eval_list)
    print('XML Evaluation Data found: ' + str(self.xml_eval_list_length))
def __init__(self):
    """Index the BSDS train/val image sets and report how many were found."""
    # Collect the image file lists up front, then record their sizes.
    self.train_list = ut.find('*.jpg', './BSDS/images/train')
    self.eval_list = ut.find('*.jpg', './BSDS/images/val')
    self.train_amount = len(self.train_list)
    self.eval_amount = len(self.eval_list)
    print('Training Pictures found: ' + str(self.train_amount))
    print('Evaluation Pictures found: ' + str(self.eval_amount))
def process_gender_json(response):
    """Fold the empty-string gender bucket into the 'U' (unknown) bucket.

    After generic JSON processing, any count recorded under gender '' is
    added to the 'U' row and the empty row is removed.

    :param response: raw response passed through process_json; a list of
        dicts with 'gender' and 'gender_count' keys (assumed — confirm
        against process_json).
    :return: the merged response list.
    """
    response = process_json(response)
    empty_idx = util.find(response, 'gender', '')
    u_idx = util.find(response, 'gender', 'U')
    # Fix: previously only empty_idx was checked, so a response with an
    # empty-gender bucket but no 'U' bucket raised TypeError on
    # response[None]. Merge only when both buckets are present.
    if empty_idx is not None and u_idx is not None:
        empty_count = int(response[empty_idx]['gender_count'])
        response[u_idx]['gender_count'] += empty_count
        response.pop(empty_idx)
    return response
def insert(insertFile, insertDir, newFile, cwd, header, keywords=[]):
    """Concatenate the files in insertDir and splice them into insertFile.

    The combined contents replace the `header` marker in insertFile; the
    result is written as newFile in insertFile's directory.

    :param keywords: passed through to util.getFileNames to filter files
        (default kept as [] for backward compatibility; it is never
        mutated here).
    """
    fileNames = util.getFileNames(insertDir, cwd, keywords)
    insertFile = util.find(insertFile, util.find(cwd, util.root))
    insertions = ""
    for fileName in fileNames:
        w = open(fileName, 'r')
        insertions += w.read() + "\n"
        w.close()
    w = open(insertFile, 'r')
    t = w.read().replace(header, insertions)
    # Fix: this read handle was previously never closed (leak).
    w.close()
    insertFile = insertFile.replace(getInsertFileDir.findall(insertFile)[-1], '')
    w = open(os.path.join(insertFile, newFile), 'w')
    w.write(t + "\n")
    w.close()
def psiu_40(z_L):
    """
    usage: psi = psiu_40(z_L)
    Computes velocity structure function given z/L

    Stable points (z/L >= 0) use the exponential-damped form; unstable
    points blend the Kansas-type profile (psik) with a convective
    profile (psic), weighted by f = zet^2 / (1 + zet^2).
    """
    # Fix: the unused numpy `min` import (which shadowed the builtin) was
    # removed.
    from numpy import exp, log, sqrt, arctan, asarray, copy
    from util import find
    zet = copy(asarray(z_L, dtype=float))  # conversion to ndarray float
    dzet = 0.35 * zet
    dzet[dzet > 50] = 50.  # cap the exponential argument for stability
    a = 1.
    b = 3. / 4.
    c = 5.
    d = 0.35
    psi = -(a * zet + b * (zet - c / d) * exp(-dzet) + b * c / d)
    k = find(zet < 0)  # indices of unstable points
    x = (1. - 18. * zet[k]) ** 0.25
    psik = 2. * log((1. + x) / 2.) + log((1. + x * x) / 2.) - 2. * arctan(x) + 2. * arctan(1.)
    x = (1. - 10. * zet[k]) ** 0.3333
    psic = 1.5 * log((1. + x + x ** 2) / 3.) - sqrt(3.) * arctan((1. + 2. * x) / sqrt(3.))
    psic += 4. * arctan(1.) / sqrt(3.)
    # Blend the two unstable profiles; f -> 1 for strongly unstable.
    f = zet[k] ** 2 / (1. + zet[k] ** 2)
    psi[k] = (1 - f) * psik + f * psic
    return psi
def plotPoint(X, Z, Z_error, fig, ax, X_label, Z_label, legend, barSize, logX, logY, legendExtra="", pointType=0, generateRandomColor=False):
    """Plot one curve on `ax`, with an optional shaded error band.

    The marker shape is looked up from legendExtra (falling back to the
    pointType index) and the color from legend (or generated randomly).
    Axis labels and optional log scales are applied as well.
    """
    markerIdx = findStartWith(legendExtra, POINT_TYPE_CORRESPONDENCE)
    marker = POINT_TYPE_LIST[pointType] if markerIdx < 0 else POINT_TYPE_LIST[markerIdx]
    if generateRandomColor:
        color = generateMappedRandomColor()
    else:
        colorIdx = find(legend, COLOR_CORRESPONDENCE)
        color = COLOR_DEFAULT if colorIdx < 0 else COLOR_LIST[colorIdx]
    label = legend + " (" + legendExtra + ")"
    ax.plot(X, Z, "-" + marker, color=color, label=label, markersize=7)
    # Keep the original `!=` comparison for behavioral parity.
    if Z_error != None:
        ax.fill_between(X, Z_error[1], Z_error[0], color=color, alpha=0.1)
    ax.set_ylabel(Z_label)
    ax.set_xlabel(X_label)
    if logX:
        ax.set_xscale('log')
    if logY:
        ax.set_yscale('log')
def results():
    """Handle the prediction form.

    On POST, a known (latitude, longitude, neighborhood) short-circuits
    to a stored prediction at 100% probability; otherwise the classifier
    is consulted. GET (and fall-through) re-renders the index page.
    """
    form = ReviewForm(request.form)
    if request.method == 'POST':
        latitude = form.Latitude.data
        longitude = form.Longitude.data
        month = request.form.get('month')
        neighborhood = request.form.get('neighborhood')
        hour = request.form.get('hour')
        data = [month, neighborhood, longitude, latitude, int(hour)]
        known = find(latitude, longitude, neighborhood)
        if known:
            return render_template('results.html', data=data, prediction=known, probability=round(100, 2))
        label, proba = classify(data)
        return render_template('results.html', data=data, prediction=label, probability=round(proba * 100, 2))
    return render_template('index.html', form=form, month_list=month_list, hour_list=hour_list, neighborhood_list=neighborhood_list)
def dedudplicate_entries(entries, tree, common_parent_level_limit=2, is_similar_cb=is_subpath_similar):
    """
    Deduplicates list of entries with respect to the given tree.

    `common_parent_level_limit` - controls how many levels up the search
    can go to look for a similar file
    """
    unique = []
    duplicates = []
    for entry in entries:
        # An exact path hit is always a duplicate.
        exact_hit = tree.get(entry.path) is not None
        similar_hit = False
        if not exact_hit:
            # Otherwise look for a similar file under the closest common
            # parent, but only within the allowed level limit.
            common_parent, remaining_path = tree.get_partial(entry.path)
            if len(remaining_path) <= common_parent_level_limit:
                match = find(
                    lambda subpath: is_similar_cb(remaining_path, subpath),
                    common_parent.walk())
                similar_hit = match is not None
        (duplicates if exact_hit or similar_hit else unique).append(entry)
    return unique, duplicates
def burn_to_sd(sdname):
    """Burn a downloaded MeeGo image to the SD card named `sdname`.

    Waits up to INTERRUPT_TIME seconds for a keypress (watched by the
    key_press thread via the KEY_PRESS flag); if interrupted, the user
    picks an image, otherwise the default (index 0, the latest image)
    is used. The image is burned only when the card size fits.
    """
    image_list = util.get_image_list(workspace)
    if len(image_list) == 0:
        print "No meego images have been downloaded."
        return
    # get choice of burned images, default is the latest image
    global INTERRUPT_TIME, KEY_PRESS
    KEY_PRESS = False
    t1 = threading.Thread(target=key_press)
    t1.start()
    choice = 0
    i = 0
    while i != INTERRUPT_TIME:
        if KEY_PRESS:
            KEY_PRESS = False
            choice = get_choice(image_list)
            break
        print "press 'Enter' to interrupt (", INTERRUPT_TIME-i, ")"
        i = i+1
        time.sleep(1)
    # compare the size
    # NOTE(review): get_choice apparently returns len(image_list) to mean
    # "cancel" — confirm.
    if choice == len(image_list):
        return
    img = image_list[choice]
    sd_list = util.get_disk_list()
    index = util.find(sdname, sd_list)
    sdsize = sd_list[index].size
    #print sdsize
    # NOTE(review): requires sdsize < MD — presumably MD is the maximum
    # supported card size; confirm.
    if sdsize < MD and sdsize > img.size:
        burn(img.full_name, sdname)
    else:
        print "The sd's size is too small."
def find_material_from_name(materials, name):
    """
    :param materials: list of materials
    :param name: material name to search for
    :return: the first material whose name partially matches `name`,
        or None if there is no match (or `name` is empty).
    """
    matches = lambda material: name and name in material['name']
    return find(matches, materials)
def insert(insertFile, insertDir, newFile, cwd, header, keywords=[], sort=False):
    """Concatenate the files in insertDir and splice them into insertFile.

    The combined contents replace the `header` marker; the result is
    written as newFile in insertFile's directory.

    :param sort: when True, concatenate files in creation-time order.
    :param keywords: passed through to util.getFileNames (default kept as
        [] for backward compatibility; it is never mutated here).
    """
    fileNames = util.getFileNames(insertDir, cwd, keywords)
    if sort:
        fileNames = sorted(fileNames, key=lambda filename: os.stat(filename).st_ctime)
    insertFile = util.find(insertFile, util.find(cwd, util.root))
    insertions = ""
    for fileName in fileNames:
        w = open(fileName, 'r')
        insertions += w.read() + "\n"
        w.close()
    w = open(insertFile, 'r')
    t = w.read().replace(header, insertions)
    # Fix: this read handle was previously never closed (leak).
    w.close()
    insertFile = insertFile.replace(getInsertFileDir.findall(insertFile)[-1], '')
    w = open(os.path.join(insertFile, newFile), 'w')
    w.write(t + "\n")
    w.close()
def burn_to_sd(sdname):
    """Burn a downloaded MeeGo image to the SD card named `sdname`.

    Gives the user INTERRUPT_TIME seconds to press Enter (signalled by
    the key_press thread through KEY_PRESS) and choose an image; the
    default is the latest image (index 0). Burns only when the card's
    size accommodates the image.
    """
    image_list = util.get_image_list(workspace)
    if len(image_list) == 0:
        print "No meego images have been downloaded."
        return
    #get choice of burned images, default is the latest image
    global INTERRUPT_TIME, KEY_PRESS
    KEY_PRESS = False
    t1 = threading.Thread(target=key_press)
    t1.start()
    choice = 0
    i = 0
    while i != INTERRUPT_TIME:
        if KEY_PRESS:
            KEY_PRESS = False
            choice = get_choice(image_list)
            break
        print "press 'Enter' to interrupt (", INTERRUPT_TIME - i, ")"
        i = i + 1
        time.sleep(1)
    # compare the size
    # NOTE(review): get_choice seems to use len(image_list) as a cancel
    # sentinel — confirm.
    if choice == len(image_list):
        return
    img = image_list[choice]
    sd_list = util.get_disk_list()
    index = util.find(sdname, sd_list)
    sdsize = sd_list[index].size
    #print sdsize
    # NOTE(review): sdsize must be below MD — presumably the maximum
    # supported card size; confirm.
    if sdsize < MD and sdsize > img.size:
        burn(img.full_name, sdname)
    else:
        print "The sd's size is too small."
def rerun_test(self, test_case, full_batch):
    """
    Rerun a given testcase
    :param test_case: The testcase to run again
    :param full_batch: If true, then the full batch is run, if false only the single testcase
    :return: The result of the new run
    """
    logging.debug("rerun_test(%s, %s)", test_case, full_batch)
    # Map the test class to its .java source file somewhere under root_dir.
    file_name = test_case.classname.split(".")[-1] + ".java"
    logging.debug("Looking for %s", file_name)
    test_file = util.find(file_name, self.root_dir)
    logging.debug("Found %s", test_file)
    if test_file is None:
        logging.error("Skipping test_case, file_name %s not found", file_name)
        return None
    # Walk up from the source file until the path component above us is
    # "src" or "target" (or exhausted) — that ancestor is the module root.
    (module_root, name) = os.path.split(test_file)
    while name and name != "src" and name != "target":
        (module_root, name) = os.path.split(module_root)
    logging.debug("Module root is %s", module_root)
    logging.debug("Removing previous test results")
    # Clear stale surefire output so the rerun's results are unambiguous.
    shutil.rmtree(os.path.join(module_root, "target", "surefire-reports"), ignore_errors=True)
    if os.path.split(module_root)[-1].startswith("qtest"):
        # Query tests need an explicit method; whole-class replay is not
        # supported for them.
        if not test_case.methodname:
            logging.info("Do not want to replay %s", test_case.classname)
            sys.exit(1)
        command = self.__get_query_test_command(test_case, full_batch)
        new_test_result = self.__run_test(module_root, command, test_case.classname, test_case.methodname)
    else:
        if not test_case.methodname:
            command = self.__get_junit_test_class_command(
                test_case.classname)
            new_test_result = self.__run_test_class(
                module_root, command, test_case.classname)
        else:
            command = self.__get_junit_test_command(test_case, full_batch)
            new_test_result = self.__run_test(module_root, command, test_case.classname, test_case.methodname)
    # Archive this run's surefire output under a fresh rerun directory.
    base_dir = os.path.join(
        self.log_dir, "rerun",
        test_case.classname + "_" + test_case.methodname)
    test_result_dir = self.__generate_new_test_results_dir_name(base_dir)
    self.__store_test_results(module_root, test_result_dir)
    if new_test_result is not None:
        logging.debug("Test result is %s %s", new_test_result, new_test_result.good)
    else:
        logging.debug("No test result generated")
    return new_test_result
def calc(monster="slime", lvl=10):
    """Run a combat simulation for `monster` at level `lvl`.

    The monster's min/max attribute tables are parsed out of its .j
    definition file before being fed to simulate().
    """
    source = open(util.find(monster + ".j", root), 'r')
    text = source.read()
    source.close()
    lowAttrs = re2Arr(getMin.findall(text))
    highAttrs = re2Arr(getMax.findall(text))
    return simulate(lowAttrs, highAttrs, lvl)
def __init__(self):
    """Resolve the LUNA data directories for the current host and index
    the available DICOM files for training and evaluation."""
    host = platform.node()
    train_path = ''
    eval_path = ''
    if host == 'LAPTOP-E6AJ1CPF':
        train_path = './LUNA/Train_Data'
        eval_path = './LUNA/Eval_Data'
    elif host == 'motel':
        train_path = '/local/scratch/public/sl767/LUNA/Training_Data'
        eval_path = '/local/scratch/public/sl767/LUNA/Evaluation_Data'
    # List the existing training data
    self.training_list = ut.find('*.dcm', train_path)
    self.training_list_length = len(self.training_list)
    print('Training Data found: ' + str(self.training_list_length))
    self.eval_list = ut.find('*.dcm', eval_path)
    self.eval_list_length = len(self.eval_list)
    print('Evaluation Data found: ' + str(self.eval_list_length))
def remove_player(self, player):
    """Remove the player from the game.

    pre: isinstance(player, Player)
    """
    # Locate the team that currently holds the player, then delegate.
    owning_team = find(self.teams.values(),
                       lambda t: t.contains_player(player))
    self.remove_player_from_team(player, owning_team)
def calc(monster="slime", lvl=10):
    """Simulate combat for `monster` at level `lvl` using the min/max
    attribute ranges parsed from the monster's .j file."""
    defFile = util.find(monster + ".j", root)
    handle = open(defFile, 'r')
    contents = handle.read()
    handle.close()
    return simulate(re2Arr(getMin.findall(contents)),
                    re2Arr(getMax.findall(contents)),
                    lvl)
def result_get_goal_points(result, goal_id):
    """Return the points of the goal with id `goal_id` from `result`,
    or 0 when no such goal exists (via util.crossroad)."""
    goal = util.find(lambda g: g.id == goal_id, result.goals)
    return util.crossroad(lambda: goal,
                          lambda: goal.points,
                          lambda: 0)
def home():
    """Serve the search page; on POST, run the query through util.find
    and render either the results or a validation error."""
    if request.method == "GET":
        return render_template("index.html")
    if request.method == "POST":
        query = request.form.get("query")
        results = util.find(query)
        if results is None:
            return render_template("index.html", query=query,
                                   error="Your query is not valid")
        return render_template("index.html", query=query, results=results)
def assert_find(filter, alen, aelf, aell):
    """Run find() over the testdata tree and verify the result count plus
    the basenames of the first and last entries."""
    testdir = pj(scriptdir, 'testdata', 'find')
    res = find(testdir, filter)
    assert len(res) == alen
    # Extract the last path component (basename) of an entry.
    basename = lambda p: re.search(r'[^' + os.path.sep + ']+$', p).group(0)
    assert basename(res[0]) == aelf
    assert basename(res[alen - 1]) == aell
    print(res)
def matchFilenames(filenames, expecteds):
    """Map each expected name to the filename that matches it.

    Both collections must have the same number of distinct elements;
    matching is delegated to matchFilename via find.
    """
    filenameSet = set(filenames)
    expectedSet = set(expecteds)
    assert len(filenameSet) == len(expectedSet)
    return {
        expected: find(lambda fn: matchFilename(fn, expected), filenameSet)
        for expected in expectedSet
    }
def add_student_session_problems(starttime, student, problems):
    """Attach each solved problem in `problems` to the given student's
    session data; does nothing when the student is unknown.

    NOTE(review): `result` is a free variable here — presumably a
    module-level or enclosing-scope results object with a `students`
    attribute; confirm where it is bound.
    """
    is_student = lambda s: s.username == student
    student_data = util.find(is_student, result.students)
    # constant() freezes the existence check once, for use in cond().
    student_not_exist = util.constant(not bool(student_data))
    is_solved = lambda problem: 'solve_time' in problem
    add_problem = lambda problem: add_session_problem(
        student_data, problem, starttime)
    # Dispatch: unknown student -> noop; solved problem -> record it.
    handle_single_problem = util.cond([(student_not_exist, util.noop),
                                       (is_solved, add_problem)])
    util.map_now(handle_single_problem, problems)
def index(args):
    """Walk every path in args.path and add each regular file to the
    collection's index, logging progress when verbose."""
    if args.verbose:
        print('Indexing files...')
    for path in args.path:
        if args.verbose:
            print("Crawling {0} for files...".format(path))
        for file in util.find(path, dirs=False):
            if args.verbose:
                print("Indexing {0}...".format(file))
            args.collection.index.add_path(file)
def config_path(custom_paths=None):
    """Return the first existing kattisrc path as a string, or None.

    Candidates are the caller-supplied paths (in order), then the
    system-wide, home, and current-directory locations.
    """
    candidates = [pathlib.Path(p) for p in (custom_paths or [])]
    candidates += [
        pathlib.Path('/usr/local/etc/kattisrc'),
        pathlib.Path.home() / '.kattisrc',
        pathlib.Path.cwd() / '.kattisrc',
    ]
    found = util.find(lambda p: p.exists(), candidates)
    return str(found) if found else None
def filter_fields(schema, obj):
    """Project `obj` down to the fields declared by the named schema.

    Returns `obj` unchanged when the schema name is unknown.
    """
    spec = util.find(lambda x: x.get("name", "") == schema, SCHEMA_OBJECTS)
    if not spec:
        return obj
    wanted = [f['name'] for f in spec['fields']]
    return {key: obj[key] for key in wanted if key in obj}
def filter_fields(schema, obj):
    """Keep only the keys of `obj` that the named schema declares;
    return `obj` as-is for an unknown schema."""
    spec = util.find(lambda x: x.get("name", "") == schema, SCHEMA_OBJECTS)
    if not spec:
        return obj
    rv = {}
    for field_name in (f["name"] for f in spec["fields"]):
        if field_name in obj:
            rv[field_name] = obj[field_name]
    return rv
def filter_fields(schema, obj):
    """Restrict `obj` to the attribute-style fields of the named schema;
    pass `obj` through unchanged when the schema is not found."""
    spec = util.find(lambda x: x.name == schema, SCHEMA_OBJECTS)
    if not spec:
        return obj
    return {f.name: obj[f.name] for f in spec.fields if f.name in obj}
def group_proofs(self):
    """Collapse self.proofs down to one representative per theorem,
    comparing theorems up to alpha-equivalence.

    Results are stored in self.witness_proofs and self.grouped is set.
    Requires proof terms (created on demand) and sanitized variables.
    """
    if not self.curry_howarded:
        self.create_proof_terms()
    self.sanitize_vars()
    res = list()
    for p in self.proofs:
        # find() returns a Maybe-like object; is_nothing() means no
        # alpha-equivalent theorem is already represented in res.
        f = find(
            res,
            lambda q: p.sequent.theorem[1].alpha_equivalent(
                q.sequent.theorem[1]))
        if f.is_nothing():
            res.append(p)
    self.witness_proofs = res
    self.grouped = True
def find_eye_extra_name(gltf):
    """Return the name of the '> <' eye material.

    :param gltf: glTF object
    :return: the '> <' eye material name (None when absent)
    """
    candidates = ['_EyeExtra_', '_FaceEyeSP']

    def is_extra_eye(name):
        # A material qualifies when any candidate marker appears in it.
        return exists(lambda c: c in name, candidates)

    return find(is_extra_eye, [m['name'] for m in gltf['materials']])
def test_find(self):
    """util.find: default equality, key= projection, and custom test=
    comparison, alone and combined."""
    cases = [
        ((42, []), {}, None),
        ((42, [42.0]), {}, 42),
        ((42, [1, 2, 3]), {}, None),
        ((42, [-41, -42, -43]), {'key': operator.neg}, -42),
        ((42, [41, 42, 43, 44]), {'test': (lambda x, y: x < y)}, 43),
        ((42, [-40, -41, -42, -43]),
         {'key': operator.neg, 'test': (lambda x, y: x < y)}, -43),
    ]
    for args, kwargs, expected in cases:
        self.assertEqual(util.find(*args, **kwargs), expected)
def genWarpLocs(readfile="warps_insert.j", headerfile="locations_header.j", keyword="//insert", cwd="lua"):
    """Generate the warp location declarations and initializers.

    Non-special warp names each get a `location` global and a matching
    getNPCLoc initializer; these replace //globals and //insert in the
    header template, and the result goes to the *_insert.j sibling file.
    """
    cwd = util.find(cwd, util.root)
    writefile = util.find(headerfile.replace("_header", "_insert"), util.root)
    headerfile = util.find(headerfile, util.root)
    warpNames = util.getInsertFileData(readfile)[0]
    declarations = ""
    initializers = ""
    for record in warpNames:
        warp = record.split("|")[0]
        if warp in special:
            continue
        locVar = util.name2Var(warp + " loc")
        idVar = util.name2Var(warp + " id")
        declarations += "\tlocation " + locVar + "\n"
        initializers += "\tset " + locVar + " = getNPCLoc(" + idVar + ")\n"
    handle = open(headerfile, 'r')
    template = handle.read()
    handle.close()
    template = template.replace("//globals", declarations)
    template = template.replace("//insert", initializers)
    handle = open(writefile, 'w')
    handle.write(template + "\n")
    handle.close()
def writeTable(constantsFiles=[], tableFile="npc_unit_id_table_header.j", tableName="npcUnitIdTable"):
    """Generate `set table[CONST_ID] = CONST` statements for every constant
    in the given constants files and splice them into the table header,
    writing the result to the *_insert.j sibling file."""
    tableFile = util.find(tableFile, util.root)
    src = open(tableFile, 'r')
    text = src.read()
    src.close()
    assignments = []
    for constantsFile in constantsFiles:
        for name in util.getConstantsFileVars(constantsFile):
            assignments.append("\tset " + tableName + "[" + name + "] = " + name.replace("_ID", "") + "\n")
    text = text.replace("//insert", "".join(assignments))
    out = open(tableFile.replace("_header.j", "") + "_insert.j", 'w')
    out.write(text + "\n")
    out.close()
def addCredits(creditsFile):
    """Append a QuestItemSetDescription call for every bibliography entry
    to the credits file, stripping old credit lines first."""
    path = util.find(creditsFile, util.root)
    src = open(path, 'r')
    text = src.read()
    src.close()
    # Drop previously generated credit lines before re-appending.
    text = getCredit.sub("", text)
    prefix = "\tcall QuestItemSetDescription(QuestCreateItem(q), "
    for key, title in bib.bibTuple():
        text += prefix + '"' + title + " (" + key + ')")\n'
    out = open(path, 'w')
    out.write(text + "\n")
    out.close()
def insert(insertFile, insertDir, newFile, cwd, header, keywords=[], sort=False):
    """Concatenate the files in insertDir and splice them into insertFile.

    The combined contents replace the `header` marker; the result is
    written as newFile in insertFile's directory.

    :param sort: when True, concatenate files in creation-time order.
    :param keywords: forwarded to util.getFileNames (default kept as []
        for backward compatibility; never mutated here).
    """
    fileNames = util.getFileNames(insertDir, cwd, keywords)
    if sort:
        fileNames = sorted(fileNames,
                           key=lambda filename: os.stat(filename).st_ctime)
    insertFile = util.find(insertFile, util.find(cwd, util.root))
    insertions = ""
    for fileName in fileNames:
        w = open(fileName, 'r')
        insertions += w.read() + "\n"
        w.close()
    w = open(insertFile, 'r')
    t = w.read().replace(header, insertions)
    # Fix: this read handle was previously never closed (leak).
    w.close()
    insertFile = insertFile.replace(
        getInsertFileDir.findall(insertFile)[-1], '')
    w = open(os.path.join(insertFile, newFile), 'w')
    w.write(t + "\n")
    w.close()
def addCredits(creditsFile):
    """Rebuild the credits quest entries from the bibliography.

    Old credit lines are removed, then one QuestItemSetDescription call
    per bibliography tuple is appended, and the file is rewritten in
    place.
    """
    path = util.find(creditsFile, util.root)
    handle = open(path, 'r')
    contents = handle.read()
    handle.close()
    contents = getCredit.sub("", contents)
    call = "\tcall QuestItemSetDescription(QuestCreateItem(q), "
    for entry in bib.bibTuple():
        contents += call + '"' + entry[1] + " (" + entry[0] + ')")\n'
    handle = open(path, 'w')
    handle.write(contents + "\n")
    handle.close()
def plotPoint(X, Z, Z_error, fig, ax, X_label, Z_label, legend, barSize, logX, logY, legendExtra="", pointType=0, generateRandomColor=False):
    """Plot one curve on `ax`, with an optional shaded error band.

    The marker is looked up in POINT_CORRESPONDENCE by index; the color
    comes from the legend-to-color table (or is generated randomly).
    Axis labels and optional log scales are applied as well.

    Fix: removed the blocks of commented-out "TODO to remove" filter code
    that dead-ended specific legends.
    """
    pointType = POINT_CORRESPONDENCE[pointType]
    if generateRandomColor:
        col = generateMappedRandomColor()
    else:
        col = find(legend, COLOR_CORRESPONDENCE)
        if col < 0:
            col = COLOR_DEFAULT
        else:
            col = COLOR_LIST[col]
    legend = legend + " (" + legendExtra + ")"
    ax.plot(X, Z, "-" + pointType, color=col, label=legend, markersize=7)
    if Z_error != None:
        alpha_fill = 0.1
        # NOTE(review): the band color is re-randomized here, so it will
        # not match the curve color — confirm this is intentional.
        col = generateMappedRandomColor()
        ax.fill_between(X, Z_error[1], Z_error[0], color=col, alpha=alpha_fill)
    ax.set_ylabel(Z_label)
    ax.set_xlabel(X_label)
    if logX:
        ax.set_xscale('log')
    if logY:
        ax.set_yscale('log')
def add_job(self, job):
    """Schedule a sMAP job.

    Chained jobs (j.after set) reuse the deferred and job id of the job
    they follow; standalone jobs get their own deferred fired by the
    reactor after the requested delay. Returns the job's outer deferred.
    """
    j = SmapJob(job)
    self.jobs.append(j)
    # StartTime is in milliseconds; convert to a relative delay.
    if 'StartTime' in job:
        start = job['StartTime'] / 1000.
        wait = start - util.now()
    else:
        wait = 0
    assert wait >= 0
    actions = j.actions
    if j.after:
        previous_job = util.find(lambda x: x.name == j.after, self.jobs)
        if previous_job is None:
            # NOTE(review): the % is applied to the raise expression, not
            # the string — should presumably be
            # SmapException("No job named %s" % j.after); as written this
            # raises a TypeError instead.
            raise SmapException("No job named %s") % j.after
        else:
            j.d_outer = previous_job.d_outer
            j.job_id = previous_job.job_id
            self._job_ids[job['uuid']] = j.job_id
    else:
        # assign it its own deferred
        j.d_outer = defer.Deferred()

    # closure that will carry out all of the job's actions
    def act(_):
        for action in actions:
            path = action['Path']
            state = action['State']
            actuator = self.inst.get_timeseries(path)
            print 'setting', path, 'to', state
            # Async jobs chain the actuation onto the inner deferred;
            # sync jobs actuate immediately.
            if j.async:
                j.d_inner.addCallback(lambda: actuator.set_state(None, state))
            else:
                actuator.set_state(None, state)

    # queue the callback
    j.d_outer.addCallback(act)
    print 'added callback to', j.d_outer
    if not j.after:
        # job_id will let you cancel it
        # j.job_id = reactor.callLater(wait, act)
        j.job_id = reactor.callLater(wait, j.d_outer.callback, None)
        self._job_ids[job['uuid']] = j.job_id
    return j.d_outer
def build_js():
    """Compile the Soy templates, regenerate deps.js, and append the
    soyutil dependency shim."""
    logger.info('Compiling closure templates')
    soys = list(util.find(JS_POSTILE, '*.soy'))
    soy_cmd = ['java', '-jar',
               PROJ_ROOT / 'tmp' / 'SoyToJsSrcCompiler.jar',
               '--outputPathFormat',
               '{INPUT_DIRECTORY}/{INPUT_FILE_NAME}.js',
               '--shouldProvideRequireSoyNamespaces',
               '--srcs']
    subprocess.call(soy_cmd + soys)
    logger.info('Building deps.js')
    deps_cmd = ['python', CLOSURE_BIN_BUILD / 'depswriter.py',
                '--root_with_prefix=%s %s' % (JS_POSTILE, '../postile'),
                '--output_file=%s' % (JS_POSTILE / 'deps.js')]
    subprocess.call(deps_cmd)
    logger.info('Adding soyutil to deps.js')
    # depswriter does not know about soyutil, so append its entry by hand.
    with open(JS_POSTILE / 'deps.js', 'a') as fout:
        with open(PROJ_ROOT / 'hacks' / 'soyutil-deps.js') as fin:
            fout.write(fin.read())
    logger.info(util.colorize('Done', 'green'))
def marginalize(self, onto_node, print_opt=False):
    '''Marginalize this potential onto a subset of its nodes by summing
    over every other dimension.

    :param onto_node: list of node ids that remain after marginalization
    :param print_opt: when True, trace which nodes/dimensions get summed
    :return: a new Potential over `onto_node`
    '''
    assert isinstance(onto_node, list), 'onto_node for marginalize must be\ a list'
    # ns maps node id -> size, indexed by node id position.
    ns = np.zeros((1, np.max(self.nodes_idx)+1))
    ns[0, self.nodes_idx] = self.sizes
    # Nodes to sum over = all nodes of this potential minus onto_node.
    sum_over = np.setdiff1d(np.array(self.nodes_idx), np.array(onto_node))
    if print_opt:
        print 'marginalize'
        print 'potential has nodes:'
        print self.nodes_idx
        print 'marginalize onto nodes:'
        print onto_node
        print 'so sum over nodes:'
        print sum_over
    # find out which dimension to sum out
    #dim_of_node_to_marg = self.noded_id.index(sum_over)
    ndx = []
    smallT = self.values
    for i in sum_over:
        # Position of node i among this potential's dimensions.
        temp = find(np.array([np.array(self.nodes_idx) == i]))
        if print_opt:
            print 'its the dimension'
            print temp
        # NOTE(review): the (1,)-shape guard presumably skips scalar
        # results from find — confirm intent.
        if temp.shape != (1,):
            ndx.append(temp[0, 0])
    ndx = np.array(ndx)
    for i in xrange(0, len(ndx)):
        if ndx[i] < smallT.ndim:
            """Sum over the dimension ndx[i]"""
            smallT = np.sum(smallT, ndx[i])
            """Compensate for reduced dimensions of smallT"""
            ndx = ndx - 1
    smallpot = Potential(onto_node, ns[0, onto_node], smallT)
    return smallpot
def add_job(self, job):
    """Schedule a sMAP job.

    Chained jobs (j.after set) share the deferred and job id of the job
    they follow; standalone jobs get a fresh deferred that the reactor
    fires after the requested delay. Returns the job's outer deferred.
    """
    j = SmapJob(job)
    self.jobs.append(j)
    # StartTime is in milliseconds; convert to a relative delay.
    if "StartTime" in job:
        start = job["StartTime"] / 1000.0
        wait = start - util.now()
    else:
        wait = 0
    assert wait >= 0
    actions = j.actions
    if j.after:
        previous_job = util.find(lambda x: x.name == j.after, self.jobs)
        if previous_job is None:
            # NOTE(review): the % binds to the raise expression, not the
            # string — presumably meant
            # SmapException("No job named %s" % j.after); as written this
            # raises a TypeError instead.
            raise SmapException("No job named %s") % j.after
        else:
            j.d_outer = previous_job.d_outer
            j.job_id = previous_job.job_id
            self._job_ids[job["uuid"]] = j.job_id
    else:
        # assign it its own deferred
        j.d_outer = defer.Deferred()

    # closure that will carry out all of the job's actions
    def act(_):
        for action in actions:
            path = action["Path"]
            state = action["State"]
            actuator = self.inst.get_timeseries(path)
            print "Setting", path, "to", state
            actuator.impl.set_state(None, state)

    # queue the callback
    j.d_outer.addCallback(act)
    print "Added callback to", j.d_outer
    if not j.after:
        # job_id will let you cancel it
        j.job_id = reactor.callLater(wait, j.d_outer.callback, None)
        self._job_ids[job["uuid"]] = j.job_id
    return j.d_outer
def som(X, eta, stddev, mapsize, tmax): """ som, a visualization method """ #--- prepare W ---# # W holds 1xd vectors # W is a mapsize x mapsize array W = np.empty((mapsize, mapsize), np.matrix) dim = len(X[0, :])# dimension for (idx, w) in enumerate(W): tmp = np.empty((mapsize), np.matrix) for i in range(0, len(tmp.T)): arr = np.random.random_sample(dim) tmp[i] = np.matrix(arr)# cast into matrix W[idx, :] = tmp #--- define gamma function ---# def gamma(r_win, r): tmp_vec = r_win - r return np.exp(- tmp_vec.dot(tmp_vec.T) / (2*stddev)) #--- update weights ---# for x in X: # prepare result variable scores = np.zeros((9,9)) for idx, w in enumerate(W): for i, u in enumerate(w): vec = x - u scores[idx, i] = vec * vec.T # iswinner => returns coordinates of the winner neuron iswinner = scores==np.max(scores)# => returns coordinates # update the weight vectors for idx, w in enumerate(W): for i, u in enumerate(w): W[idx, i] += eta * \ gamma(np.asarray([idx,i]), \ np.asarray(find( iswinner.tolist()))) \ * x return W
def find(self, kind, func):
    """Search this unit's cursor tree for nodes of `kind` accepted by
    `func` (delegates to util.find)."""
    cursor = self.unit.cursor
    return util.find(cursor, kind, func)
def next(self):
    """Resolve the next playlist entry to a concrete song file.

    Entries can be literal file/folder names, player-record selectors
    (BEST/WORST/LIKES/DISLIKES), or '*' wildcards for random picks.
    Returns (fullname, [diff] * number_of_players); raises StopIteration
    when the list is exhausted or the entry cannot be resolved.
    """
    # We're done.
    if self.index == len(self.songs):
        raise StopIteration
    name, diff, mods = self.songs[self.index]
    fullname = None
    # Difficulty can be a list, an "a..b" range string, or one number.
    a, b = 0, 0
    if isinstance(diff, list):
        pass
    elif diff.find("..") != -1:
        a, b = map(int, diff.split(".."))
    elif len(diff) < 3:
        a, b = int(diff), int(diff)
    if a or b:
        diff = range(a, b + 1)
    # Check for player's best/worst/likes/dislikes. There are stored
    # as a tuple of (type, number).
    if name[0] == _("BEST"):
        s = self.recordkeys.get(records.best(name[1], diff, self.gametype), None)
        if s:
            fullname = s.filename
            if isinstance(diff, list):
                diff = [d for d in diff if d in s.difficulty[self.gametype]][0]
    elif name[0] == _("WORST"):
        s = self.recordkeys.get(records.worst(name[1], diff, self.gametype), None)
        if s:
            fullname = s.filename
            if isinstance(diff, list):
                diff = [d for d in diff if d in s.difficulty[self.gametype]][0]
    elif name[0] == _("LIKES"):
        s = self.recordkeys.get(records.like(name[1], diff, self.gametype), None)
        if s:
            fullname = s.filename
            if isinstance(diff, list):
                diff = [d for d in diff if d in s.difficulty[self.gametype]][0]
    elif name[0] == _("DISLIKES"):
        s = self.recordkeys.get(records.dislike(name[1], diff, self.gametype), None)
        if s:
            fullname = s.filename
            if isinstance(diff, list):
                diff = [d for d in diff if d in s.difficulty[self.gametype]][0]
    elif name[-1] == "*":
        # A random song
        # First pull out all the songs that we might be acceptable.
        if "/" in name:
            # Random song from a specific mix.
            folder, dummy = name.split("/")
            folder = folder.lower()
            if folder in self.all_songs:
                songs = [s for s in self.all_songs[folder].values()
                         if (s, diff) not in self.past_songs]
            else:
                error.ErrorMessage(self.screen, folder + _(" was not found."))
                raise StopIteration
        else:
            # Any random song.
            songs = []
            for v in self.all_songs.values():
                songs.extend(v.values())
        # Keep only unplayed songs that actually have the difficulty.
        songs = [s for s in songs
                 if (s, diff) not in self.past_songs
                 and self._find_difficulty(s, diff)]
        if len(songs) == 0:
            error.ErrorMessage(self.screen, _("No valid songs were found."))
            raise StopIteration
        else:
            song = random.choice(songs)
            diff = self._find_difficulty(song, diff)
            fullname = song.filename
    # Let's try to find the damned song.
    # Unfortunately, it can be given as just a title(+subtitle), or a
    # mix with a title. Or a filename. That's why we need the
    # all_songs hash.
    else:
        for path in mainconfig["songdir"].split(os.pathsep):
            fn = os.path.join(path, name)
            fn = os.path.expanduser(fn)
            if os.path.isfile(fn):
                fullname = fn
            elif os.path.isdir(fn):
                file_list = util.find(fn, ["*.sm", "*.dwi"])
                if len(file_list) != 0:
                    fullname = file_list[0]
            if fullname:
                break
        if not fullname and len(name[0]) == 1:
            # Still haven't found it...
            # Try a "mix/title" lookup against the all_songs hash.
            folder, song = name.split("/")
            song = self.all_songs.get(folder.lower(), {}).get(song.lower())
            if song:
                fullname = song.filename
    if not fullname:
        if len(name[0]) > 1:
            name = _("Player's %s #%d") % (name[0].capitalize(), name[1])
        error.ErrorMessage(self.screen, name + _("was not found."))
        raise StopIteration
    self.index += 1
    self.past_songs.append((fullname, diff))
    return (fullname, [diff] * len(self.player_configs))
import os
import re
import util

# Root of the tree that bibliography/readme files are searched under.
root = util.find("Imports", "C:\\")

# Everything after the last dot of a path (the file extension).
getExtension = re.compile(r"(?<=\.).+$")
getReadmeName = re.compile(r"\A[a-z0-9 \._]+", re.IGNORECASE)
getReadmeAuthor = re.compile(r"(?<=by )[a-z0-9 \._]+", re.IGNORECASE)

EXTENSIONS = ["txt"]

def getBibNamesArr(l=None, cwd=root, extensions=EXTENSIONS):
    """Recursively collect into l the paths under cwd whose extension
    is in extensions.  Mutates and returns nothing; l is the accumulator.

    FIX: the default for l was a mutable [] shared across calls, so
    repeated no-argument calls silently accumulated stale paths.
    """
    if l is None:
        l = []
    for fileName in os.listdir(cwd):
        path = os.path.join(cwd, fileName)
        if os.path.isfile(path):
            # FIX: guard against extensionless files -- findall() returns
            # [] for them and the old [0] index raised IndexError.
            found = getExtension.findall(path)
            if found and found[0] in extensions:
                l.append(path)
        else:
            # Directory: recurse with the same accumulator.
            getBibNamesArr(l, path, extensions)

def getBibNames(cwd=root, extensions=EXTENSIONS):
    """Return a list of all matching file paths under cwd."""
    l = []
    getBibNamesArr(l, cwd, extensions)
    return l
def get_report(self, id):
    """Return the subscriber entry whose 'uuid' field equals id.

    Lookup is delegated to util.find over self.subscribers.
    """
    def has_matching_uuid(item):
        return item['uuid'] == id
    return util.find(has_matching_uuid, self.subscribers)
# Append the fixed per-monster template lines (attribute min/max, ability
# ids, and registration in AbstractMonsterTable) to the generated header.
header += "\t//****************DEF INT STR ATT AGI HP MANA SP\n"
header += "\tcall m.setAttrMin( 1, 1, 1, 1, 1, 1, 1, 1)\n"
header += "\tcall m.setAttrMax( 1, 1, 1, 1, 1, 1, 1, 1)\n"
header += "\tcall m.setAbilities(0, 1, 2)\n"
header += "\tset AbstractMonsterTable[m.index] = m\n"

# Scrape monster family constants and proper names out of the insert file.
getFamily = re.compile(r'(?<="unam", )[A-Z_]+')
getName = re.compile(r'(?<="properNames", ")[a-z ]+', re.IGNORECASE)

insert = util.find("monsters_insert.j", util.root)
w = open(insert, 'r')
t = w.read()
l = getFamily.findall(t)   # family identifiers
r = getName.findall(t)     # proper names
# Drop the last captured character of each name -- presumably a trailing
# artifact of the regex match; TODO(review) confirm against the data file.
r = [x[:-1] for x in r]
w.close()

mDir = util.find("abstract_monster_merge", util.root)
def find_git_dir(results_dir):
    """Walk upward from results_dir and return the first ancestor
    directory that contains a .git entry (via find/walk_dirs_up)."""
    def contains_git(candidate):
        marker = os.path.join(candidate, ".git")
        return os.path.exists(marker)
    return find(contains_git, walk_dirs_up(results_dir))
import re
import util
import os

# Keyword variants of item files handled elsewhere in this tooling.
KW = ["decrep", "merge", "header"]

# Scrape item display names, the starting raw item id, and per-item
# totals out of a *_insert.j source file.
getItemName = re.compile(r'(?<= unam = ")[a-z ]+', re.IGNORECASE)
getId = re.compile(r'(?<=itemId = \')[a-z0-9]+')
getTotal = re.compile(r'(?<= i total = )[0-9]+')

# Directory the generated constants libraries are written to.
CONSTANTS_DIR = util.find("constants", util.root)

def itemConstants(itemFile, constantsFile, cwd, keywords = KW, suffix = ""):
    # NOTE(review): this copy of the definition appears truncated here --
    # the complete version continues past the varNames assignment.
    # Generates a JASS constants library for the items found in itemFile.
    cwd = util.find(cwd, util.root)
    fileName = util.find(itemFile, cwd)
    path = os.path.join(CONSTANTS_DIR, constantsFile)
    w = open(fileName, 'r')
    t = w.read()
    w.close()
    # Library name: strip the "_insert.j" suffix and underscores,
    # title-case the remainder.
    libName = itemFile.replace("_insert.j", "")
    header = "library " + libName.title() + "Constants\n"
    header = header.replace("_", "")
    header += "globals\n"
    itemId = getId.findall(t)[0]
    varNames = [util.name2Var(x) for x in getItemName.findall(t)]
def main(): print "pydance", VERSION, "<*****@*****.**> - irc.freenode.net/#pyddr" if mainconfig["usepsyco"]: try: import psyco print _("Psyco optimizing compiler found. Using psyco.full().") psyco.full() except ImportError: print _("W: Psyco optimizing compiler not found.") # default settings for play_and_quit. mode = "SINGLE" difficulty = "BASIC" test_file = None for opt, arg in getopt(sys.argv[1:], "hvf:d:m:", ["help", "version", "filename=", "difficulty=", "mode="])[0]: if opt in ["-h", _("--help")]: print_help() elif opt in ["-v", _("--version")]: print_version() elif opt in ["-f", _("--filename")]: test_file = arg elif opt in ["-m", _("--mode")]: mode = arg elif opt in ["-d", _("--difficulty")]: difficulty = arg if test_file: play_and_quit(test_file, mode, difficulty) song_list = {} course_list = [] for dir in mainconfig["songdir"].split(os.pathsep): print _("Searching for songs in"), dir song_list.update(util.find_songs(dir, ['*.dance', '*.sm', '*.dwi', '*/song.*'])) for dir in mainconfig["coursedir"].split(os.pathsep): print _("Searching for courses in"), dir course_list.extend(util.find(dir, ['*.crs'])) screen = set_display_mode() pygame.display.set_caption("pydance " + VERSION) pygame.mouse.set_visible(False) try: if os.path.exists("/usr/share/pixmaps/pydance.png"): icon = pygame.image.load("/usr/share/pixmaps/pydance.png") else: icon = pygame.image.load(os.path.join(pydance_path, "icon.png")) pygame.display.set_icon(icon) except: pass music.load(os.path.join(sound_path, "menu.ogg")) music.play(4, 0.0) songs = load_files(screen, song_list.values(), _("songs"), SongItem, (False,)) # Construct the song and record dictionaries for courses. These are # necessary because courses identify songs by title and mix, rather # than filename. The recordkey dictionary is needed for player's # picks courses. 
song_dict = {} record_dict = {} for song in songs: mix = song.info["mix"].lower() title = song.info["title"].lower() if song.info["subtitle"]: title += " " + song.info["subtitle"].lower() if not song_dict.has_key(mix): song_dict[mix] = {} song_dict[mix][title] = song record_dict[song.info["recordkey"]] = song crs = load_files(screen, [course_list], _("courses"), courses.CourseFile, (song_dict, record_dict)) crs.extend(courses.make_players(song_dict, record_dict)) records.verify(record_dict) # Let the GC clean these up if it needs to. song_list = None course_list = None record_dict = None pad.empty() if len(songs) < 1: ErrorMessage(screen, (_("You don't have any songs or step files. Check out " "http://icculus.org/pyddr/get.php#songs " "and download some free ones. " "If you already have some, make sure they're in ")) + mainconfig["songdir"]) raise SystemExit(_("You don't have any songs. Check http://icculus.org/pyddr/get.php#songs .")) menudriver.do(screen, (songs, crs, screen)) # Clean up shit. music.stop() pygame.quit() mainconfig.write(os.path.join(rc_path, "pydance.cfg")) # FIXME -- is this option a good idea? if mainconfig["saveinput"]: pad.write(os.path.join(rc_path, "input.cfg")) records.write()
#simulates monster stat growth for a given level. import re import os import util import random root = util.find("abstract_monster_merge", util.root) getMin = re.compile(r'(?<=Min\()[^\)]+') getMax = re.compile(r'(?<=Max\()[^\)]+') DEF = 0 INT = 1 STR = 2 ATT = 3 AGI = 4 HP = 5 MANA = 6 SP = 7 def re2Arr(arr): s = arr[0] s = s.replace(" ", "") return s.split(",") def simulate(arrMin, arrMax, lvl = 10): l = []