def _init():
    """
    One-time module initialization.

    Fixes up the RadiometryContext struct mirror and, when the
    PLASTER_C_COMPILE environment variable is set, regenerates
    ./_radiometry.h inside c_radiometry_path — rebuilding the native
    code only if the header content actually changed.
    """
    RadiometryContext.struct_fixup()

    if not local.env.get("PLASTER_C_COMPILE"):
        return

    with local.cwd(c_radiometry_path):
        fp = StringIO()
        with redirect_stdout(fp):
            # Fixed preamble of the generated header; emitted in one shot.
            preamble = [
                "// This file was code-generated by sigproc_v2.c_radiometry.c_radiometry.load_lib and should be version controlled",
                "",
                "#ifndef RADIOMETRY_H",
                "#define RADIOMETRY_H",
                "",
                '#include "stdint.h"',
                '#include "c_common.h"',
                "",
            ]
            print("\n".join(preamble))
            RadiometryContext.struct_emit_header(fp)
            print("#endif")

        generated = fp.getvalue()
        header_file_path = "./_radiometry.h"
        # Only rewrite (and rebuild) when the generated content changed,
        # so repeated imports do not trigger spurious recompiles.
        if utils.load(header_file_path, return_on_non_existing="") != generated:
            utils.save(header_file_path, generated)
            build_dev()
def _init():
    """
    One-time module initialization; must be called before anything else
    in this module.

    Side effects:
        * Runs SurveyV2Context.struct_fixup() unconditionally.
        * When PLASTER_C_COMPILE is set in the environment, regenerates
          ./_survey_v2.h inside lib_folder (only when its content
          changed) and then triggers build_dev().
    """
    SurveyV2Context.struct_fixup()

    if local.env.get("PLASTER_C_COMPILE"):
        with local.cwd(lib_folder):
            fp = StringIO()
            with redirect_stdout(fp):
                print(
                    "// This file was code-generated by survey_v2.c.survey_v2.init and should be version controlled"
                )
                print()
                # BUGFIX: the include guard was copy-pasted from sim_v2 as
                # SIM_V2_H; that collides with sim_v2's own generated header,
                # which would silently drop one of the two if both were
                # included in the same translation unit.
                print("#ifndef SURVEY_V2_H")
                print("#define SURVEY_V2_H")
                print()
                print('#include "stdint.h"')
                print('#include "c_common.h"')
                print()
                SurveyV2Context.struct_emit_header(fp)
                print("#endif")

            header_file_path = "./_survey_v2.h"
            existing_h = utils.load(header_file_path, return_on_non_existing="")
            # Only rewrite and rebuild when the generated content changed.
            if existing_h != fp.getvalue():
                utils.save(header_file_path, fp.getvalue())
                build_dev()
def zest_survey_integration():
    """
    Integration check: show that a survey gen and run can execute
    end to end against a small fixed protein CSV.
    """
    csv_file = "/tmp/__zest_survey_integration.csv"
    protein_csv = utils.smart_wrap(
        """
        Name,Seq,Abundance,POI
        pep0,ALNCLVMQL,1,1
        pep1,APHGVVFL,1,1
        pep2,KIADYNYML,1,1
        pep3,MLPDDFTGC,4,1
        pep4,CCQSLQTYV,1,1
        pep5,TLMSKTQSL,1,1
        pep6,VLCMNQKLI,1,1
        pep7,ACCDFTAKV,1,0
        """,
        assert_if_exceeds_width=True,
    )
    utils.save(csv_file, protein_csv)

    p = local["p"]
    p[
        "gen",
        "survey",
        "--sample=zest_survey_integration",
        f"--protein_csv={csv_file}",
        "--label_set=C,M",
        "--n_pres=1",
        "--n_mocks=0",
        "--n_edmans=15",
        "--force",
        "--job=./jobs_folder/__zest_survey_integration",
    ] & FG
    p["run", "./jobs_folder/__zest_survey_integration"] & FG

    zest()
def _init():
    """
    One-time module initialization; must be called before anything else
    in this module.

    Fixes up the NNV2Context struct mirror and, when PLASTER_C_COMPILE
    is set, regenerates ./_nn_v2.h inside c_nn_v2_path — rebuilding the
    native code only if the header content actually changed.
    """
    NNV2Context.struct_fixup()

    if not local.env.get("PLASTER_C_COMPILE"):
        return

    with local.cwd(c_nn_v2_path):
        fp = StringIO()
        with redirect_stdout(fp):
            # Fixed preamble of the generated header; emitted in one shot.
            preamble = [
                "// This file was code-generated by nn_v2.c.nn_v2.load_lib and should be version controlled",
                "",
                "#ifndef NN_V2_H",
                "#define NN_V2_H",
                "",
                '#include "pthread.h"',
                '#include "stdint.h"',
                '#include "c_common.h"',
                "",
            ]
            print("\n".join(preamble))
            NNV2ScoringVerboseFields.emit_col_name_defines(fp)
            print()
            NNV2Context.struct_emit_header(fp)
            print("#endif")

        generated = fp.getvalue()
        header_file_path = "./_nn_v2.h"
        # Only rewrite (and rebuild) when the generated content changed.
        if utils.load(header_file_path, return_on_non_existing="") != generated:
            utils.save(header_file_path, generated)
            build_dev()
def _init():
    """
    One-time module initialization; must be called before anything else
    in this module.

    Fixes up the C-mirrored structs and, when PLASTER_C_COMPILE is set,
    regenerates ./_sim_v2.h inside lib_folder — rebuilding the native
    code only if the header content actually changed.
    """
    SimV2Context.struct_fixup()
    # Dyt.struct_fixup() -- Dyt is a variable-sized record and its
    # typedef is emitted by hand below instead of via struct_emit_header.
    PCB.struct_fixup()
    Counts.struct_fixup()
    DyePepRec.struct_fixup()

    if not local.env.get("PLASTER_C_COMPILE"):
        return

    with local.cwd(lib_folder):
        fp = StringIO()
        with redirect_stdout(fp):
            # Fixed preamble of the generated header; emitted in one shot.
            preamble = [
                "// This file was code-generated by sim_v2.c.sim_v2.init and should be version controlled",
                "",
                "#ifndef SIM_V2_H",
                "#define SIM_V2_H",
                "",
                '#include "stdint.h"',
                '#include "c_common.h"',
                "",
            ]
            print("\n".join(preamble))
            print(
                textwrap.dedent(
                    """\
                typedef struct {
                    Size count;
                    Index dyt_i;
                    DyeType chcy_dye_counts[];
                    // Note, this is a variable sized record
                    // See dyt_* functions for manipulating it
                } Dyt;  // Dye-track record
                """
                )
            )
            print()
            SimV2Context.struct_emit_header(fp)
            # Dyt.struct_emit_header(fp) -- hand-emitted above
            PCB.struct_emit_header(fp)
            Counts.struct_emit_header(fp)
            print("#endif")

        generated = fp.getvalue()
        header_file_path = "./_sim_v2.h"
        # Only rewrite (and rebuild) when the generated content changed.
        if utils.load(header_file_path, return_on_non_existing="") != generated:
            utils.save(header_file_path, generated)
            build_dev()
def cache_source(cache_folder, source, copy_to):
    """
    If this is a URL or S3 fetch and cache at cache_folder.
    Local files can be loaded from //jobs_folder/... only.

    In all cases, the cached file is optionally copied to copy_to so
    that job folders contain a copy of all gen source data.
    (NOTE(review): the `copy_to / filename` join assumes copy_to is
    path-like — confirm against callers.)

    Returns:
        the contents of the file
    """

    def _cached_path():
        # Cache key is the md5 of the source locator itself, not the content.
        local.path(cache_folder).mkdir()
        digest = hashlib.md5(source.encode("utf-8")).hexdigest()
        return local.path(cache_folder) / digest

    if source.startswith(("http://", "https://")):
        cache_path = _cached_path()
        if cache_path.exists():
            file_contents = utils.load(cache_path)
        else:
            important(f"Fetching {source}... (TO {cache_path})")
            file_contents = _url_get(source)
            utils.save(cache_path, file_contents)
    elif source.startswith("s3://"):
        cache_path = _cached_path()
        if not cache_path.exists():
            tell(f"Syncing from {source} to {cache_path}")
            # aws s3 cp writes straight to disk; fall through and load it.
            local["aws"]["s3", "cp", source, cache_path] & FG
        file_contents = utils.load(cache_path)
    else:
        file_contents = utils.load(source)

    assert file_contents is not None

    if copy_to:
        assert local.path(copy_to).exists()
        filename = local.path(source).basename
        utils.save(copy_to / filename, file_contents)

    return file_contents
def lnfit(lnfit_params, sigproc_result):
    """
    Export track photometries to CSV and (unless photometry_only) run the
    Alex lognormal fitter inside a docker container mounted on the current
    job folder.

    Args:
        lnfit_params: parameters object; reads .dye_on_threshold,
            .photometry_only and .lognormal_fitter_v2_params.
        sigproc_result: passed through to _alex_track_photometries_csv.

    Returns:
        LNFitResult summarizing the run.

    Raises:
        LnFitError: when the docker invocation itself cannot be launched
            (CommandNotFound from plumbum).
    """
    csv = _alex_track_photometries_csv(sigproc_result, lnfit_params.dye_on_threshold)

    # This photometry_filename will get mounted into the container
    photometry_filename = "track_photometries.csv"
    utils.save(photometry_filename, csv)

    if not lnfit_params.photometry_only:
        # If we're running in a docker context then the path will start with
        # /app, which we need to substitute for the real host OS path.
        # This has to come from the environment since it might vary from host to host
        data_folder = os.environ.get("HOST_PLASTER_DATA_FOLDER", "./jobs_folder")
        data_folder = os.path.join(data_folder, "")  # Adds a slash if needed
        lnfit_path = str(local.path(".")).replace("/app/jobs_folder/", data_folder)

        def run_docker_command(command):
            # Refresh ECR credentials, then run `command` inside the alex image
            # with the job folder bind-mounted at /lnfit.
            local["bash"]["-c", utils.get_ecr_login_string()] & FG
            aws_creds = []
            if local.env.get("ON_AWS", "0") == "0":
                # Off-AWS we must forward explicit credentials into the container.
                aws_creds = [
                    f"--env",
                    f"AWS_ACCESS_KEY_ID={local.env['AWS_ACCESS_KEY_ID']}",
                    f"--env",
                    f"AWS_SECRET_ACCESS_KEY={local.env['AWS_SECRET_ACCESS_KEY']}",
                    f"--env",
                    f"AWS_DEFAULT_REGION={local.env['AWS_DEFAULT_REGION']}",
                ]
            local["docker"][
                [
                    f"run",
                    f"-it",
                    *aws_creds,
                    f"--mount",
                    f"type=bind,source={lnfit_path},target=/lnfit",
                    f"188029688209.dkr.ecr.us-east-1.amazonaws.com/alex:latest",
                    f"bash",
                    f"-c",
                    command,
                ]
            ] & FG(retcode=None)

        container_command = (
            f"cd /home/proteanseq "
            f"&& python ./pflib/lognormal_fitter_v2.py "
            f" {lnfit_params.lognormal_fitter_v2_params} "
            f" /lnfit/{photometry_filename} "
            f" >/lnfit/LN.OUT 2>/lnfit/LN.ERR"
        )
        try:
            run_docker_command(container_command)
        except CommandNotFound as e:
            # BUGFIX: chain the cause explicitly; the original bound `e` but
            # discarded it, losing the underlying CommandNotFound context.
            raise LnFitError from e

    return LNFitResult(
        params=lnfit_params,
        photometry_rows=csv.count("\n") - 1,
        dye_on_threshold=lnfit_params.dye_on_threshold,
        did_fit=not lnfit_params.photometry_only,
    )