def build_editor(ctx):
    """Builds and configures the editor"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(SENTINEL_SCRIPT_PATH,
                                    ["run-module", "ue4", "build", "editor"],
                                    data)
    utilities.run_cmd(cmd)
def build_editor(ctx):
    """Builds and configures the editor"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./Sentinel.py",
                                    ["run-module", "ue4", "build", "editor"],
                                    data)
    utilities.run_cmd(cmd)
def from_fst_to_file(in_fst, out_file, in_lex="all.lex", home_path=""):
    # Print the FST with its input/output symbol tables and sort the
    # printed arcs numerically in reverse order.
    with open(out_file, "w") as result:
        run_cmd("fstprint --isymbols=" + home_path + in_lex +
                " --osymbols=" + home_path + "all.lex " + in_fst +
                " | sort -r -n", result)
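# `run_cmd` is called throughout this file with a shell pipeline string and
# an open file that receives stdout, but its definition is not shown here.
# A minimal sketch of what it might look like, assuming shell=True semantics
# and an optional as_root flag (both inferred from the call sites, not
# confirmed by the source):
def run_cmd_sketch(command, out_file=None, as_root=False):
    import subprocess
    if as_root:
        command = "sudo " + command
    # Run through the shell so pipes in `command` work; send stdout to the
    # supplied file object when one is given.
    return subprocess.call(command, shell=True, stdout=out_file)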
def validate_packages(ctx):
    """Checks project packages for errors"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        "./Sentinel.py",
        ["run-module", "ue4", "project", "refresh-asset-info"],
        data)
    utilities.run_cmd(cmd)
def create_lex(in_file, out_lex):
    # Keep the first two columns (token and tag) of the training data,
    # then build a symbol table over them with ngramsymbols.
    with open("tem.lex", "w") as temp:
        run_cmd("cat " + in_file + " | cut -f 1,2", temp)
    with open(out_lex, "w") as lexicon_file:
        run_cmd("ngramsymbols tem.lex", lexicon_file)
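# A usage sketch for create_lex, assuming a tab-separated training file
# ("train.txt" is a hypothetical name) whose first two columns are token
# and POS tag; ngramsymbols then assigns an integer ID to each symbol:
#
#     create_lex("train.txt", "all.lex")
#
# The resulting all.lex maps each symbol to an ID, one pair per line,
# e.g. "<epsilon> 0", "the 1", and so on.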
def commands(ctx, args):
    """Utility commands"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./commands.py", args, data)
    utilities.run_cmd(cmd)
def vcs(ctx, args):
    """Interact with the Version Control System"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./sentinel_vcs/vcs_cli.py", args, data)
    utilities.run_cmd(cmd)
def ue4(ctx, args):
    """Interact with UE4"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(SENTINEL_UE4, args, data)
    utilities.run_cmd(cmd)
def environment(ctx, args):
    """Info about the local environment"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(SENTINEL_ENVIRONMENT, args, data)
    utilities.run_cmd(cmd)
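# commands(), vcs(), ue4() and environment() above all share the same shape:
# default to --help, convert ctx to a dict, build a command line, run it.
# A hedged refactoring sketch (make_dispatcher is hypothetical, not part of
# the existing API) that would factor out that repetition:
def make_dispatcher(script_name, help_text=""):
    def dispatch(ctx, args):
        if not args:
            args = "--help"
        data = utilities.convert_input_to_dict(ctx)
        cmd = utilities.get_commandline(script_name, args, data)
        utilities.run_cmd(cmd)
    dispatch.__doc__ = help_text
    return dispatch

# e.g. vcs = make_dispatcher("./sentinel_vcs/vcs_cli.py",
#                            "Interact with the Version Control System")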
def generate_lightmaps(ctx):
    """Generates lightmaps for the project"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        "./Sentinel.py",
        ["run-module", "ue4", "project", "commandlet"],
        data,
        sub_command_arguments=["--task=Build-Lighting"])
    utilities.run_cmd(cmd)
def generate_ddc_cache(ctx):
    """Refreshes the DDC cache for the project"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        "./Sentinel.py",
        ["run-module", "ue4", "project", "commandlet"],
        data,
        sub_command_arguments=["--task=generate-ddc-cache"])
    utilities.run_cmd(cmd)
def validate_blueprints(ctx):
    """Checks all blueprints in the project for errors"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        "./Sentinel.py",
        ["run-module", "ue4", "project", "commandlet"],
        data,
        sub_command_arguments=["--task=Compile-Blueprints"])
    utilities.run_cmd(cmd)
def refresh_config(ctx, default=False):
    data = utilities.convert_input_to_dict(ctx)
    default_arg = str(default).lower()
    generate_config_cmd = utilities.get_commandline(
        "./Sentinel.py",
        ["run-module", "environment", "generate"],
        data,
        sub_command_arguments=["--default=" + default_arg])
    utilities.run_cmd(generate_config_cmd)
def fix_up_redirectors(ctx):
    """Runs the fixup-redirectors commandlet"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        SENTINEL_SCRIPT_PATH,
        ["run-module", "ue4", "project", "commandlet"],
        data,
        sub_command_arguments=["--task=fixup-redirectors"])
    utilities.run_cmd(cmd)
def validate_project(ctx):
    """Checks the dev environment for errors"""
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./Sentinel.py",
                                    ["run-module", "ue4", "build", "editor"],
                                    data)
    # If the compile-blueprints step fails it returns an error code; we
    # want this step to exit successfully regardless.
    utilities.run_cmd(cmd, overwrite_exit_code=0)
def database(ctx, args):
    """Interact with the Database"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./SentinelDB/SentinelDB.py", args, data)
    utilities.run_cmd(cmd)
def ue4(ctx, args):
    """Interact with UE4"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./SentinelUE4/SentinelUE4.py", args, data)
    utilities.run_cmd(cmd)
def environment(ctx, args):
    """Info about the local environment"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        "./SentinelEnvironment/SentinelEnvironment.py", args, data)
    utilities.run_cmd(cmd)
def run_module(ctx, args):
    """Run modules in isolation"""
    data = utilities.convert_input_to_dict(ctx)
    if not args:
        args = "--help"
    cmd = utilities.get_commandline(script_name="./standalone.py",
                                    script_commands=args,
                                    global_arguments=data)
    utilities.run_cmd(cmd)
def aws(ctx, args):
    """Interact with Amazon Web Services"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./SentinelAWS/SentinelAWS.py", args, data,
                                    arguments_at_end=False)
    utilities.run_cmd(cmd)
def run_action(ctx, args):
    """Run actions and commands"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(script_name="./commands.py",
                                    script_commands=args,
                                    global_arguments=data)
    utilities.run_cmd(cmd)
def run_query(ctx, args):
    """Fetch information from the environment"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(script_name="./queries.py",
                                    script_commands=args,
                                    global_arguments=data)
    utilities.run_cmd(cmd)
def run_action(ctx, args):
    """Run actions and commands"""
    if not args:
        args = "--help"
    data = utilities.convert_input_to_dict(ctx)
    script = root_folder.joinpath("commands.py").as_posix()
    cmd = utilities.get_commandline(script_name=script,
                                    script_commands=args,
                                    global_arguments=data)
    utilities.run_cmd(cmd)
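# utilities.get_commandline is used throughout but not defined in this file.
# From the call sites it takes a script path, a list (or string) of
# sub-commands, a dict of global arguments, and optional sub-command
# arguments. A minimal sketch consistent with that usage (the real
# implementation may differ, e.g. it also supports arguments_at_end):
def get_commandline_sketch(script_name, script_commands,
                           global_arguments=None, sub_command_arguments=None):
    if isinstance(script_commands, str):
        script_commands = [script_commands]
    cmd = ["python", script_name]
    # Global flags such as --project_root=... come from the ctx dict.
    for key, value in (global_arguments or {}).items():
        cmd.append("--%s=%s" % (key, value))
    cmd.extend(script_commands)
    cmd.extend(sub_command_arguments or [])
    return " ".join(cmd)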
def build_client(ctx, preset, deploy_path, compress):
    """Builds and configures a playable client"""
    global_args = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline(
        "./Sentinel.py",
        ["run-module", "ue4", "build", "client"],
        global_arguments=global_args,
        sub_command_arguments=["--preset=" + preset])
    utilities.run_cmd(cmd)

    if deploy_path:
        if deploy_path.lower().startswith("s3"):
            print("Dealing with an s3 path")
        elif os.path.exists(deploy_path):
            print("Dealing with an os path")
        else:
            print("Unable to access deploy path")
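# The s3 branch above only prints a placeholder. A hedged sketch of what a
# deploy step might look like, assuming boto3 is available and that
# `build_output` points at the packaged client archive (both are
# assumptions, not part of the existing code):
def deploy_to_s3_sketch(build_output, deploy_path):
    import boto3
    # deploy_path is expected to look like "s3://bucket/prefix/client.zip".
    bucket, _, key = deploy_path[len("s3://"):].partition("/")
    boto3.client("s3").upload_file(build_output, bucket, key)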
def predict(sentence, ordered_labels, all_labels, labels_prob=[],
            home_path="", stop_words={}, add_prior=False):
    # Tag the sentence: phrase -> FST -> printed shortest path -> CRF tags.
    from_phrase_to_fst(sentence, home_path + "NB/sentence.fst", home_path)
    from_fst_to_file(home_path + "NB/sentence.fst",
                     home_path + "NB/sentence.txt", "all.lex", home_path)
    with open(home_path + "NB/sentence_complete.txt", "w") as sentence_complete:
        run_cmd("cat " + home_path + "NB/sentence.txt | cut -f 3,4",
                sentence_complete)
    with open(home_path + "NB/sentence_final.txt", "w") as sentence_final:
        run_cmd("crf_test -v 1 -m " + home_path + "crf.lm " +
                home_path + "NB/sentence_complete.txt", sentence_final)
    # remove_stop_words("NB/sentence_final.txt", "NB/sentence_final.txt",
    #                   stop_words)
    create_bigrams(home_path + "NB/sentence_final.txt",
                   home_path + "NB/sentence_bigrams.txt", True)
    replace_word_with_concept(home_path + "NB/sentence_final.txt",
                              home_path + "NB/token+crf.txt", 2, False)
    create_bigrams(home_path + "NB/token+crf.txt",
                   home_path + "NB/token+crf.bigrams.txt", True, '\t', 2)

    # Score the bigrams against every label and pick the lowest total cost.
    sums = [look_into_this(home_path + "NB/sentence_bigrams.txt",
                           all_labels, "bi_word", home_path),
            look_into_this(home_path + "NB/sentence_bigrams.txt",
                           all_labels, "bi_pos", home_path)]
    results = np.sum(sums, axis=0)
    value = np.min(results)
    index = results.tolist().index(value)

    # Normalize the three best costs into probabilities.
    indexes = np.argpartition(results, 3)[:3]
    elem = {}
    temp = [math.exp(-x) for x in results]
    sum_of_probs = float(sum(temp))
    for ind in indexes:
        elem[ordered_labels[ind]] = math.exp(-results[ind]) / sum_of_probs

    if add_prior:
        results = [math.exp(-x) for x in results]
        labels_prob = [x * x for x in labels_prob]
        # Weight the scores by the (squared) class priors.
        results = np.multiply(results, labels_prob)
        index = results.tolist().index(max(results))

    result = {}
    result["other_classes"] = elem
    result["main_class"] = (str(math.exp(-value) / sum_of_probs) +
                            "\t" + ordered_labels[index])
    return result
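# predict() converts shortest-path costs (negative log probabilities in the
# tropical semiring) into normalized probabilities with a softmax over
# exp(-cost). A tiny worked example of that step in isolation:
#
#     costs = [2.0, 3.0, 5.0]            # lower cost = more likely
#     weights = [math.exp(-c) for c in costs]
#     total = sum(weights)               # ~0.1919
#     probs = [w / total for w in weights]
#     # probs ~= [0.705, 0.259, 0.035]; the minimum-cost class wins.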
def setup(ctx, project_root, engine_root, config_root, artifact_name,
          vcs_root, artifacts_root, cache_root):
    """First-time environment setup"""
    input_arguments = [
        "--project_name=" + project_root,
        "--engine_path=" + engine_root,
        "--artifact_name=" + artifact_name,
        "--config_path=" + config_root,
        "--version_control_root=" + vcs_root,
        "--artifacts_root=" + artifacts_root,
        "--cache_path=" + cache_root
    ]
    global_args = utilities.convert_input_to_dict(ctx)
    script = root_folder.joinpath("Sentinel.py").as_posix()
    generate_default_config_cmd = utilities.get_commandline(
        script,
        ["run-module", "environment", "make-default-config"],
        global_args,
        input_arguments)
    utilities.run_cmd(generate_default_config_cmd)
    commands.refresh_config(ctx)
def evaluate(pred_file, test_file_labels, test_file, result_file):
    # Join predictions with gold labels, then score with conlleval.
    with open("NB/temp.txt", "w") as temp, open(result_file, "w") as results:
        with open("NB/temp_complete", "w") as temp2:
            run_cmd("paste " + pred_file + " " + test_file_labels, temp)
            run_cmd("paste " + test_file + " " + temp.name, temp2)
            run_cmd("perl eval/conlleval.pl -d '\t' -r -o NOEXIST < " +
                    temp2.name, results)
    os.remove("NB/temp.txt")
def from_file_to_fst(in_file, out_fst_name, in_lex_file, home_path=""):
    fst_compile(in_file, "OutputData/intermediateSentence/sentence.fst",
                in_lex_file, home_path)
    fst_compose(home_path + "OutputData/intermediateSentence/sentence.fst",
                home_path + "third.fst",
                home_path + "OutputData/intermediateSentence/sentence_compose.fst")
    fst_compose(home_path + "OutputData/intermediateSentence/sentence_compose.fst",
                home_path + "OutputData/intermediateFst/pos.lm",
                home_path + "OutputData/intermediateSentence/sentence_compose_2.fst")
    with open(out_fst_name, "w") as out_fst:
        run_cmd("fstrmepsilon " + home_path +
                "OutputData/intermediateSentence/sentence_compose_2.fst"
                " | fstshortestpath", out_fst)
def count_n_grams(in_file, out_file, num):
    with open(out_file, "w") as out_cnt:
        run_cmd("ngramcount --order=" + str(num) +
                " --require_symbols=false " + in_file, out_cnt)
if not os.path.isfile("third.fst"):
    exit_stat = subprocess.call("python createFST.py " + train_file,
                                shell=True)

phrases = []
if "col" in type:
    phrases = from_file_to_phrases(input_file)
else:
    with open(input_file, "r") as new_train:
        phrases = [x.strip() for x in new_train]

if os.path.isfile(output_file):
    os.remove(output_file)

test_result = open(output_file, 'a')
counter = 1
for phrase in phrases:
    from_phrase_to_fst(phrase, "short_sentence_compose.fst")
    from_fst_to_file("short_sentence_compose.fst", "fstprinted.txt")
    run_cmd("cat fstprinted.txt", test_result)
    print(str(counter) + " over " + str(len(phrases)))
    counter += 1
test_result.close()
# subprocess.call("python evaluate.py " + correct_labels + " " +
#                 test_result.name, shell=True)
# show_fst("short_sentence_compose.fst", "B.png")
def toolchain_specific_setup(args):
    log("info", "Running android-specific setup")

    cmd = "pacman -S --noconfirm wget sudo"
    run_cmd(cmd, as_root=True)

    # wget and curl output unsuitable progress bars even when not
    # connected to a TTY. Turn them off.
    with open("/etc/wgetrc", "a") as f:
        print("verbose = off", file=f)
    with open("/etc/.curlrc", "a") as f:
        print("silent", file=f)
        print("show-error", file=f)

    log("info", "Downloading & unpacking NDK")
    os.chdir("/home/tuscan")
    setup_file = "/home/tuscan/ndk.bin"
    cmd = ("wget -O %s"
           " http://dl.google.com/android/ndk/android-"
           "ndk-r10e-linux-x86_64.bin" % (setup_file))
    run_cmd(cmd)
    cmd = "chmod +x " + setup_file
    run_cmd(cmd)
    run_cmd(setup_file, output=False)

    log("info", "Setting up toolchain")
    cmd = ("/home/tuscan/android-ndk-r10e/build/tools/"
           "make-standalone-toolchain.sh"
           " --arch=arm --platform=android-21"
           " --install-dir=/toolchain_root")
    run_cmd(cmd)

    cmd = "chown -R tuscan: /toolchain_root"
    run_cmd(cmd, as_root=True)
    cmd = "chown -R tuscan: /home/tuscan/android-ndk-r10e"
    run_cmd(cmd, as_root=True)

    for f in os.listdir(os.path.join("/toolchain_root", "bin")):
        f = os.path.join("/toolchain_root", "bin", f)
        cmd = "chmod a+rx %s" % f
        run_cmd(cmd, as_root=True)
for lines in pos:
    words = lines.split("\t")
    unknw_fst.write("0\t0\t<unk>\t" + words[0] + "\t" +
                    str(-1 * math.log(1.0 / float(number_of_tokens))) + "\n")
unknw_fst.write("0")
unknw_fst.close()
pos.close()

create_lex(name_train_token_pos_lemma, "all.lex")
fst_compile("OutputData/intermediateFst/token_pos_fst.prefst",
            "OutputData/intermediateFst/first.fst")
fst_compile("OutputData/intermediateFst/unkwn_fst.txt",
            "OutputData/intermediateFst/unkwn_fst.fst")
fst_union("OutputData/intermediateFst/unkwn_fst.fst",
          "OutputData/intermediateFst/first.fst", "second.fst")
new_file = open("third.fst", "w")
run_cmd("fstclosure second.fst", new_file)
new_file.close()

# POS phrases created
pos_phrase = open("OutputData/count_prob/pos_sentence.txt", "w")
run_cmd("cat " + name_train_token_pos_lemma +
        " | cut -f 2 | sed 's/^ *$/#/g' | tr '\n' ' ' | tr '#' '\n'"
        " | sed 's/^ *//g;s/ *$//g' ", pos_phrase)
pos_phrase.close()
text_to_FAR("OutputData/count_prob/pos_sentence.txt",
            "OutputData/intermediateFst/pos.far")
create_lm("OutputData/intermediateFst/pos.far",
          "OutputData/intermediateFst/pos.lm", 4)

"""
# for Lemmas
def fst_union(first_file, second_file, out):
    with open(out, "w") as out_fst:
        run_cmd('fstunion ' + first_file + " " + second_file, out_fst)
def toolchain_specific_setup(args):
    log("info", "Running android-specific setup")

    if not os.path.isdir("/sysroot"):
        os.mkdir("/sysroot")
    recursive_chown("/sysroot")

    # wget and curl output unsuitable progress bars even when not
    # connected to a TTY. Turn them off.
    with open("/etc/wgetrc", "a") as f:
        print("verbose = off", file=f)
    with open("/etc/.curlrc", "a") as f:
        print("silent", file=f)
        print("show-error", file=f)

    log("info", "Downloading & unpacking NDK")
    os.chdir("/home/tuscan")
    setup_file = "/home/tuscan/ndk.bin"
    cmd = ("wget -O %s"
           " http://dl.google.com/android/ndk/android-"
           "ndk-r10e-linux-x86_64.bin" % (setup_file))
    run_cmd(cmd)
    cmd = "chmod +x " + setup_file
    run_cmd(cmd)
    run_cmd(setup_file, output=False)

    log("info", "Setting up toolchain")
    cmd = ("/home/tuscan/android-ndk-r10e/build/tools/"
           "make-standalone-toolchain.sh"
           " --arch=arm --platform=android-21"
           " --install-dir=/sysroot")
    run_cmd(cmd)

    cmd = "chown -R tuscan: /sysroot"
    run_cmd(cmd, as_root=True)
    cmd = "chown -R tuscan: /home/tuscan/android-ndk-r10e"
    run_cmd(cmd, as_root=True)

    bindirs = [
        "/sysroot/bin",
        "/sysroot/libexec/gcc/arm-linux-androideabi/4.8"
    ]
    for d in bindirs:
        for f in os.listdir(d):
            f = os.path.join(d, f)
            cmd = "chmod a+rx %s" % f
            run_cmd(cmd, as_root=True)

    for f in os.listdir("/sysroot"):
        if os.path.isdir(os.path.join("/sysroot", f)):
            shutil.copytree(os.path.join("/sysroot", f),
                            os.path.join("/toolchain_root", f))
        elif os.path.isfile(os.path.join("/sysroot", f)):
            shutil.copy(os.path.join("/sysroot", f), "/toolchain_root")
    recursive_chown("/toolchain_root")
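# recursive_chown is used above but not defined in this file. A minimal
# sketch of the assumed behavior (chown a tree to the tuscan user; the real
# helper may differ):
def recursive_chown_sketch(path, user="tuscan"):
    import os
    import shutil
    shutil.chown(path, user)
    for root, dirs, files in os.walk(path):
        for name in dirs + files:
            shutil.chown(os.path.join(root, name), user)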
def create_lm(in_file, out_file_name, num=3, method="witten_bell"):
    count_name = "OutputData/count_prob/pos_sen.cnt"
    count_n_grams(in_file, count_name, num)
    with open(out_file_name, "w") as out_file:
        run_cmd("ngrammake --method=" + method + " " + count_name, out_file)
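# A usage sketch chaining the OpenGrm helpers in this file into the full
# language-model pipeline (file names are hypothetical): compile the corpus
# into a FAR archive, count n-grams, then smooth them into a model.
#
#     text_to_FAR("corpus.txt", "corpus.far")
#     create_lm("corpus.far", "corpus.lm", num=3, method="witten_bell")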
def fst_compose(first, second, out):
    with open(out, "w") as out_fst:
        run_cmd('fstcompose ' + first + ' ' + second, out_fst)
def fst_compile(in_file, out_file, i_lex_file="all.lex", home_path=""):
    with open(home_path + out_file, "w") as out_fst:
        run_cmd('fstcompile --isymbols=' + home_path + i_lex_file +
                ' --osymbols=' + home_path + 'all.lex ' + in_file, out_fst)
def main():
    """Install vanilla bootstrap packages from local mirror.

    Installing all the bootstrap packages is a lengthy (and highly
    disk-IO bound, thus serializing) procedure, so it's best to do it
    only once. Instead of having each container running the
    make_package stage installing the bootstrap packages, we install
    the bootstrap packages in this container and then base the
    make_package containers on the image of this container.
    """
    parser = get_argparser()
    args = parser.parse_args()

    # GPG takes time. Remove package signature checks.
    lines = []
    with open("/etc/pacman.conf") as f:
        for line in f:
            if re.search("SigLevel", line):
                lines.append("SigLevel = Never")
            else:
                lines.append(line.strip())
    with open("/etc/pacman.conf", "w") as f:
        for line in lines:
            print(line.strip(), file=f)

    name_data_file = os.path.join(args.shared_directory,
                                  "get_base_package_names",
                                  "latest", "names.json")
    with open(name_data_file) as f:
        name_data = json.load(f)
    bootstrap_packs = (name_data["base"] + name_data["base_devel"]
                       + name_data["tools"] + ["sloccount"])

    vanilla = "file://" + args.mirror_directory + "/$repo/os/$arch"
    log("info", "Printing %s to mirrorlist" % vanilla)
    with open("/etc/pacman.d/mirrorlist", "w") as f:
        print("Server = " + vanilla, file=f)

    cmd = "pacman -Syy --noconfirm"
    time = timestamp()
    cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT, universal_newlines=True)
    log("command", cmd, cp.stdout.splitlines(), time)
    if cp.returncode:
        exit(1)

    cmd = "pacman -Su --noconfirm " + " ".join(bootstrap_packs)
    time = timestamp()
    cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT, universal_newlines=True)
    log("command", cmd, cp.stdout.splitlines(), time)
    if cp.returncode:
        exit(1)

    run_cmd("useradd -m -s /bin/bash tuscan", as_root=True)

    # User 'tuscan' needs to be able to use sudo without being harassed
    # for passwords, and so does root (to su into tuscan).
    with open("/etc/sudoers", "a") as f:
        print("tuscan ALL=(ALL) NOPASSWD: ALL", file=f)
        print("root ALL=(ALL) NOPASSWD: ALL", file=f)

    # Download and install bear
    with tempfile.TemporaryDirectory() as d:
        url = ("https://github.com/karkhaz/Bear/blob/master/"
               "bear-2.1.5-1-x86_64.pkg.tar.xz?raw=true")
        response = urllib.request.urlopen(url)
        tar_file = response.read()
        pkg_name = "bear.pkg.tar.xz"
        with open(os.path.join(d, pkg_name), "wb") as f:
            f.write(tar_file)
        os.chdir(d)
        cmd = "pacman -U --noconfirm %s" % pkg_name
        cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
        log("command", cmd, cp.stdout.splitlines())
        if cp.returncode:
            exit(1)

    os.mkdir("/toolchain_root")
    shutil.chown("/toolchain_root", "tuscan")

    # Replace native tools with thin wrappers
    with open("/build/tool_redirect_rules.yaml") as f:
        transforms = yaml.load(f)
    execs = transforms["overwrite"] + list(transforms["replacements"].keys())
    # Removing one occurrence of each name leaves behind only duplicates.
    for e in set(execs):
        execs.remove(e)
    if execs:
        log("error", ("The following executables have been specified "
                      "twice in the tool_redirect_rules.yaml: %s"
                      % str(execs)))
        exit(1)
    for e in transforms["overwrite"]:
        transforms["replacements"][e] = e
    transforms.pop("overwrite", None)

    jinja = jinja2.Environment(loader=jinja2.FileSystemLoader(["/build"]))
    wrapper_temp = jinja.get_template("tool_wrapper.c")
    with tempfile.TemporaryDirectory() as tmp_dir:
        for native, toolchain in transforms["replacements"].items():
            wrapper = wrapper_temp.render(
                native_program=native,
                toolchain_prefix=transforms["prefix"],
                toolchain_program=toolchain)
            with tempfile.NamedTemporaryFile("w", suffix=".c") as temp:
                temp.write(wrapper)
                temp.flush()
                cmd = "gcc -o %s %s" % (os.path.join(tmp_dir, native),
                                        temp.name)
                proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT,
                                        universal_newlines=True)
                out, _ = proc.communicate()
                if proc.returncode:
                    body = "%s\n%s\n%s" % (cmd, out, wrapper)
                    log("error", "Failed to compile compiler wrapper",
                        body=body)
                    exit(1)
        for wrapper in os.listdir(tmp_dir):
            shutil.move(os.path.join(tmp_dir, wrapper),
                        os.path.join("/usr/bin", wrapper))

    setup.toolchain_specific_setup(args)
    exit(0)
def get_commit_id(ctx):
    """Returns the current commit ID"""
    global_args = utilities.convert_input_to_dict(ctx)
    cmd = utilities.get_commandline("./Sentinel.py",
                                    ["run-module", "vcs",
                                     "get-current-commit-id"],
                                    global_args)
    utilities.run_cmd(cmd)
def text_to_FAR(infile, outfile):
    with open(outfile, "w") as out_far:
        run_cmd("farcompilestrings --symbols=all.lex"
                " --unknown_symbol='<unk>' " + infile, out_far)