def reset_database(database=[], deployment_target=None):
    """Runs kubectl commands to delete and reset the given database(s).

    Args:
        database (list): one or more database labels - "seqrdb", "phenotipsdb", "mongodb"
        deployment_target (string): value from DEPLOYMENT_TARGETS - eg. "minikube", "gcloud-dev", etc.
    """
    if "seqrdb" in database:
        postgres_pod_name = get_pod_name("postgres", deployment_target=deployment_target)
        if not postgres_pod_name:
            logger.error("postgres pod must be running")
        else:
            run_in_pod(postgres_pod_name, "psql -U postgres postgres -c 'drop database seqrdb'",
                errors_to_ignore=["does not exist"])
            run_in_pod(postgres_pod_name, "psql -U postgres postgres -c 'create database seqrdb'")

    if "phenotipsdb" in database:
        postgres_pod_name = get_pod_name("postgres", deployment_target=deployment_target)
        if not postgres_pod_name:
            logger.error("postgres pod must be running")
        else:
            run_in_pod(postgres_pod_name, "psql -U xwiki postgres -c 'drop database xwiki'",
                errors_to_ignore=["does not exist"])
            run_in_pod(postgres_pod_name, "psql -U xwiki postgres -c 'create database xwiki'")
            #run("kubectl exec %(postgres_pod_name)s -- psql -U postgres xwiki < data/init_phenotipsdb.sql" % locals())

    if "mongodb" in database:
        mongo_pod_name = get_pod_name("mongo", deployment_target=deployment_target)
        if not mongo_pod_name:
            logger.error("mongo pod must be running")
        else:
            run_in_pod(mongo_pod_name, "mongo datastore --eval 'db.dropDatabase()'")
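# Usage sketch (assumes the relevant pods are already running; the target value
# is illustrative):
#
#     reset_database(database=["seqrdb", "phenotipsdb"], deployment_target="minikube")
#
# Each label is handled independently, so passing a single label resets just
# that database.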
def open_shell_in_component(component, deployment_target, shell_path='/bin/bash'):
    """Open a command line shell in the given component"""
    run_in_pod(component, shell_path, deployment_target=deployment_target, is_interactive=True)
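# Usage sketch (assumes a deployed "seqr" pod; pods built on minimal base
# images may only ship /bin/sh, in which case pass shell_path='/bin/sh'):
#
#     open_shell_in_component("seqr", deployment_target="minikube")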
def load_example_project(deployment_target, genome_version="37", cpu_limit=None, start_with_step=None):
    """Load example project

    Args:
        deployment_target (string): value from DEPLOYMENT_TARGETS - eg. "minikube", "gcloud-dev", etc.
        genome_version (string): reference genome version - either "37" or "38"
    """
    project_name = "1kg"

    check_kubernetes_context(deployment_target)

    pod_name = get_pod_name('seqr', deployment_target=deployment_target)
    if not pod_name:
        raise ValueError(
            "No 'seqr' pod found. Is the kubectl environment configured in this terminal, and has the 'seqr' pod been deployed?")

    run_in_pod(pod_name, "wget -N https://storage.googleapis.com/seqr-reference-data/test-projects/1kg.ped")
    #run_in_pod(pod_name, "gsutil cp %(ped)s ." % locals())

    # TODO call APIs instead?
    run_in_pod(pod_name, "python2.7 -u -m manage create_project -p '1kg.ped' '%(project_name)s'" % locals(), verbose=True)

    if genome_version == "37":
        vcf_filename = "1kg.vcf.gz"
    elif genome_version == "38":
        vcf_filename = "1kg.liftover.GRCh38.vep.vcf.gz"
    else:
        raise ValueError("Unexpected genome_version: %s" % (genome_version,))

    load_dataset(
        deployment_target,
        project_name=project_name,
        genome_version=genome_version,
        sample_type="WES",
        dataset_type="VARIANTS",
        cpu_limit=cpu_limit,
        start_with_step=start_with_step,
        vcf="https://storage.googleapis.com/seqr-reference-data/test-projects/%(vcf_filename)s" % locals())
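# Usage sketch (loads the public 1kg test project; values are illustrative):
#
#     load_example_project("minikube", genome_version="38")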
def create_user(deployment_target, email=None, password=None):
    """Creates a seqr superuser.

    Args:
        deployment_target (string): value from DEPLOYMENT_TARGETS - eg. "minikube", "gcloud-dev", etc.
        email (string): if provided, user will be created non-interactively
        password (string): if provided, user will be created non-interactively
    """
    check_kubernetes_context(deployment_target)

    if not email:
        run_in_pod("seqr", "python -u manage.py createsuperuser", is_interactive=True)
    else:
        logger.info("Creating user %(email)s" % locals())
        run_in_pod(
            "seqr",
            """echo "from django.contrib.auth.models import User; User.objects.create_superuser('%(email)s', '%(email)s', '%(password)s')" \| python manage.py shell""" % locals(),
            print_command=False,
            errors_to_ignore=["already exists"])
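# Usage sketch (non-interactive form; the credentials are placeholders):
#
#     create_user("minikube", email="admin@example.com", password="changeme")
#
# Omitting email falls through to Django's interactive createsuperuser prompt.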
def load_dataset(deployment_target, project_name, genome_version, sample_type, dataset_type, vcf,
                 memory_to_use=None, cpu_limit=None, **kwargs):
    """Load dataset into elasticsearch."""

    pod_name = get_pod_name('pipeline-runner', deployment_target=deployment_target)

    # pass any extra non-None kwargs through to the load script as "--key value" args
    additional_load_command_args = " ".join(
        "--%s '%s'" % (key.lower().replace("_", "-"), value)
        for key, value in kwargs.items() if value is not None)

    if deployment_target == "minikube":
        vcf_name = os.path.basename(vcf)
        path_in_pod = "/data/{}".format(vcf_name)
        if os.path.isfile(vcf):
            # if given a local file path, copy the file into the pod
            run("kubectl cp '%(vcf)s' '%(pod_name)s:%(path_in_pod)s'" % locals())
        elif vcf.startswith("http"):
            run_in_pod(pod_name, "wget -N %(vcf)s -O %(path_in_pod)s" % locals())
        elif vcf.startswith("gs:"):
            run_in_pod(pod_name, "gsutil cp -n %(vcf)s %(path_in_pod)s" % locals())
        vcf = path_in_pod

        total_memory = psutil.virtual_memory().total - 6 * 10**9  # leave 6Gb for other processes
        # divide available memory evenly between the spark driver & executor
        memory_to_use = "%sG" % (total_memory // 2 // 10**9) if memory_to_use is None else memory_to_use
        cpu_limit = max(1, psutil.cpu_count() // 2) if cpu_limit is None else cpu_limit

        load_command = """/hail-elasticsearch-pipelines/run_hail_locally.sh \
    --driver-memory %(memory_to_use)s \
    --executor-memory %(memory_to_use)s \
    hail_scripts/v01/load_dataset_to_es.py \
    --cpu-limit %(cpu_limit)s \
    --genome-version %(genome_version)s \
    --project-guid %(project_name)s \
    --sample-type %(sample_type)s \
    --dataset-type %(dataset_type)s \
    --skip-validation \
    --exclude-hgmd \
    --vep-block-size 100 \
    --es-block-size 10 \
    --num-shards 1 \
    --max-samples-per-index 99 \
    %(additional_load_command_args)s \
    %(vcf)s
""" % locals()
    else:
        load_command = """/hail-elasticsearch-pipelines/run_hail_on_dataproc.sh \
    hail_scripts/v01/load_dataset_to_es.py \
    --genome-version %(genome_version)s \
    --project-guid %(project_name)s \
    --sample-type %(sample_type)s \
    --dataset-type %(dataset_type)s \
    %(additional_load_command_args)s \
    %(vcf)s
""" % locals()

    run_in_pod(pod_name, load_command, verbose=True)
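# Worked example of the minikube resource defaults (illustrative numbers): on a
# 32 GB / 8-core machine, total_memory = 32*10**9 - 6*10**9 = 26*10**9, so
# memory_to_use = "13G" for both the spark driver and executor, and
# cpu_limit = max(1, 8 // 2) = 4.
#
# Usage sketch (the gs:// path is a placeholder):
#
#     load_dataset("minikube", project_name="1kg", genome_version="37",
#                  sample_type="WES", dataset_type="VARIANTS",
#                  vcf="gs://my-bucket/my-callset.vcf.gz")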
def update_reference_data(deployment_target):
    """DEPRECATED. Load reference data into mongodb.

    Args:
        deployment_target (string): value from DEPLOYMENT_TARGETS - eg. "minikube", "gcloud-dev", etc.
    """
    check_kubernetes_context(deployment_target)

    pod_name = get_pod_name('seqr', deployment_target=deployment_target)
    if not pod_name:
        raise ValueError(
            "No 'seqr' pod found. Is the kubectl environment configured in this terminal, and has the 'seqr' pod been deployed?")

    # commented out because this is not loaded from settings backup
    #run_in_pod(pod_name, "python2.7 -u manage.py update_all_reference_data --omim-key '$OMIM_KEY'" % locals(), verbose=True, print_command=True)

    run_in_pod(pod_name, "mkdir -p /seqr/data/reference_data")
    run_in_pod(pod_name, "wget -N https://storage.googleapis.com/seqr-reference-data/seqr-resource-bundle.tar.gz -O /seqr/data/reference_data/seqr-resource-bundle.tar.gz")
    run_in_pod(pod_name, "tar xzf /seqr/data/reference_data/seqr-resource-bundle.tar.gz -C /seqr/data/reference_data", verbose=True)
    run_in_pod(pod_name, "rm /seqr/data/reference_data/seqr-resource-bundle.tar.gz")

    # load legacy resources
    run_in_pod(pod_name, "python -u manage.py load_resources", verbose=True)
    run_in_pod(pod_name, "python -u manage.py load_omim", verbose=True)
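# Usage sketch (deprecated code path, kept for legacy deployments):
#
#     update_reference_data("minikube")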
def deploy_seqr(settings):
    print_separator("seqr")

    if settings["BUILD_DOCKER_IMAGES"]:
        seqr_git_hash = run("git log -1 --pretty=%h", errors_to_ignore=["Not a git repository"])
        seqr_git_hash = (":" + seqr_git_hash.strip()) if seqr_git_hash is not None else ""

        docker_build("seqr", settings, [
            "--build-arg SEQR_SERVICE_PORT=%s" % settings["SEQR_SERVICE_PORT"],
            "--build-arg SEQR_UI_DEV_PORT=%s" % settings["SEQR_UI_DEV_PORT"],
            "-f deploy/docker/seqr/Dockerfile",
            "-t %(DOCKER_IMAGE_NAME)s" + seqr_git_hash,
        ])

    if settings["ONLY_PUSH_TO_REGISTRY"]:
        return

    restore_seqr_db_from_backup = settings.get("RESTORE_SEQR_DB_FROM_BACKUP")
    reset_db = settings.get("RESET_DB")

    deployment_target = settings["DEPLOY_TO"]
    postgres_pod_name = get_pod_name("postgres", deployment_target=deployment_target)

    if settings["DELETE_BEFORE_DEPLOY"]:
        delete_pod("seqr", settings)
    elif reset_db or restore_seqr_db_from_backup:
        seqr_pod_name = get_pod_name('seqr', deployment_target=deployment_target)
        if seqr_pod_name:
            sleep_until_pod_is_running("seqr", deployment_target=deployment_target)
            run_in_pod(seqr_pod_name, "/usr/local/bin/stop_server.sh", verbose=True)

    if reset_db:
        run_in_pod(
            postgres_pod_name,
            "psql -U postgres postgres -c 'drop database seqrdb'",
            errors_to_ignore=["does not exist"],
            verbose=True)

    if restore_seqr_db_from_backup:
        run_in_pod(
            postgres_pod_name,
            "psql -U postgres postgres -c 'drop database seqrdb'",
            errors_to_ignore=["does not exist"],
            verbose=True)
        run_in_pod(postgres_pod_name, "psql -U postgres postgres -c 'create database seqrdb'", verbose=True)
        run("kubectl cp '%(restore_seqr_db_from_backup)s' %(postgres_pod_name)s:/root/$(basename %(restore_seqr_db_from_backup)s)" % locals(), verbose=True)
        run_in_pod(
            postgres_pod_name,
            "/root/restore_database_backup.sh postgres seqrdb /root/$(basename %(restore_seqr_db_from_backup)s)" % locals(),
            verbose=True)
        run_in_pod(postgres_pod_name, "rm /root/$(basename %(restore_seqr_db_from_backup)s)" % locals(), verbose=True)
    else:
        run_in_pod(
            postgres_pod_name,
            "psql -U postgres postgres -c 'create database seqrdb'",
            errors_to_ignore=["already exists"],
            verbose=True)

    deploy_pod("seqr", settings, wait_until_pod_is_ready=True)
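# Minimal settings sketch for a local redeploy without an image rebuild (these
# are the keys this function reads; values are illustrative, not defaults, and
# docker_build/deploy_pod may read additional keys):
#
#     deploy_seqr({
#         "BUILD_DOCKER_IMAGES": False,
#         "ONLY_PUSH_TO_REGISTRY": False,
#         "DELETE_BEFORE_DEPLOY": True,
#         "DEPLOY_TO": "minikube",
#     })
#
# RESET_DB and RESTORE_SEQR_DB_FROM_BACKUP are read with settings.get(), so
# they can be omitted.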
def deploy_phenotips(settings):
    print_separator("phenotips")

    phenotips_service_port = settings["PHENOTIPS_SERVICE_PORT"]
    restore_phenotips_db_from_backup = settings.get("RESTORE_PHENOTIPS_DB_FROM_BACKUP")
    reset_db = settings.get("RESET_DB")

    deployment_target = settings["DEPLOY_TO"]

    if reset_db or restore_phenotips_db_from_backup:
        delete_pod("phenotips", settings)
        run_in_pod(
            "postgres",
            "psql -U postgres postgres -c 'drop database xwiki'",
            verbose=True,
            errors_to_ignore=["does not exist"],
            deployment_target=deployment_target)
    elif settings["DELETE_BEFORE_DEPLOY"]:
        delete_pod("phenotips", settings)

    # init postgres
    if not settings["ONLY_PUSH_TO_REGISTRY"]:
        run_in_pod(
            "postgres",
            "psql -U postgres postgres -c \"create role xwiki with CREATEDB LOGIN PASSWORD 'xwiki'\"",
            verbose=True,
            errors_to_ignore=["already exists"],
            deployment_target=deployment_target)
        run_in_pod(
            "postgres",
            "psql -U xwiki postgres -c 'create database xwiki'",
            verbose=True,
            errors_to_ignore=["already exists"],
            deployment_target=deployment_target)
        run_in_pod(
            "postgres",
            "psql -U postgres postgres -c 'grant all privileges on database xwiki to xwiki'",
            deployment_target=deployment_target)

    # build container
    docker_build("phenotips", settings, ["--build-arg PHENOTIPS_SERVICE_PORT=%s" % phenotips_service_port])

    if settings["ONLY_PUSH_TO_REGISTRY"]:
        return

    deploy_pod("phenotips", settings, wait_until_pod_is_ready=True)

    for i in range(0, 3):
        # opening the PhenoTips website for the 1st time triggers a final set of initialization
        # steps which take ~ 1 minute, so fetch the page with curl to trigger this
        try:
            run_in_pod(
                "phenotips",
                #command="wget http://localhost:%(phenotips_service_port)s -O test.html" % locals(),
                command="curl --verbose -L -u Admin:admin http://localhost:%(phenotips_service_port)s -o test.html" % locals(),
                verbose=True)
        except Exception as e:
            logger.error(str(e))

        if i < 2:
            logger.info("Waiting for phenotips to start up...")
            time.sleep(10)

    if restore_phenotips_db_from_backup:
        delete_pod("phenotips", settings)

        postgres_pod_name = get_pod_name("postgres", deployment_target=deployment_target)

        run("kubectl cp '%(restore_phenotips_db_from_backup)s' %(postgres_pod_name)s:/root/$(basename %(restore_phenotips_db_from_backup)s)" % locals(), verbose=True)
        run_in_pod(
            "postgres",
            "/root/restore_database_backup.sh xwiki xwiki /root/$(basename %(restore_phenotips_db_from_backup)s)" % locals(),
            deployment_target=deployment_target,
            verbose=True)
        run_in_pod(
            "postgres",
            "rm /root/$(basename %(restore_phenotips_db_from_backup)s)" % locals(),
            deployment_target=deployment_target,
            verbose=True)

        deploy_pod("phenotips", settings, wait_until_pod_is_ready=True)
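# Usage sketch (values illustrative; RESET_DB and RESTORE_PHENOTIPS_DB_FROM_BACKUP
# are read with settings.get(), so they can be omitted, while delete_pod,
# docker_build and deploy_pod may read additional keys):
#
#     deploy_phenotips({
#         "PHENOTIPS_SERVICE_PORT": 8080,
#         "DEPLOY_TO": "minikube",
#         "DELETE_BEFORE_DEPLOY": False,
#         "ONLY_PUSH_TO_REGISTRY": False,
#     })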