        # Submit the Beta Diversity jobs
        try:
            # Attempt the submission
            submitQiimeJob(meta_id, user_id, job_type, job_input, data_access)

        except Exception, e:
            raise ValueError,e

    # Generate and Submit Heatmap Job
    if 'heatmap' in analyses_to_start:
        job_type='makeOTUHeatmap'

        # Submit the Heatmap job
        try:
            # Attempt the submission
            submitQiimeJob(meta_id, user_id, job_type, job_input, data_access)

        except Exception, e:
            raise ValueError,e

    # Generate and Submit Alpha-Diversity Job
    if 'arare' in analyses_to_start:
        job_type='alphaRarefaction'

        # Submit the Alpha Diversity jobs
        try:
            # Attempt the submission
            submitQiimeJob(meta_id, user_id, job_type, job_input, data_access)

        except Exception, e:
            raise ValueError,e

def run_other_qiime_analysis(data_access, fs_fp, web_fp, otu_table_filepath, 
                             map_filepath, file_name_prefix, user_id, meta_id, 
                             params_path, rarefied_at, jobs_to_start, tree_fp, 
                             zip_fpath, zip_fpath_db):
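    """Queue the user-selected downstream QIIME analyses for a meta-analysis.

    Packs the supplied filepaths and IDs into a '!!'-delimited job_input
    string, then submits one job per analysis named in jobs_to_start
    (e.g. 'showTE', 'bdiv') to the torque-poller via submitQiimeJob.
    """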

    # get the date to put in the db
    run_date=datetime.now().strftime("%d/%m/%Y/%H/%M/%S")
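    # run_date is day/month/year/hour/minute/second,
    # e.g. '25/12/2011/13/45/30' (illustrative value)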
    
    # Prepare the params for submitting new jobs to the torque-poller
    params=[]
    params.append('fs_fp=%s' % fs_fp)
    params.append('web_fp=%s' % web_fp)
    params.append('otu_table_fp=%s' % otu_table_filepath)
    params.append('mapping_file_fp=%s' % map_filepath)
    params.append('fname_prefix=%s' % file_name_prefix)
    params.append('user_id=%s' % user_id)
    params.append('meta_id=%s' % meta_id)
    params.append('params_path=%s' % params_path)
    params.append('bdiv_rarefied_at=%s' % rarefied_at)
    params.append('jobs_to_start=%s' % jobs_to_start)
    params.append('tree_fp=%s' % tree_fp)
    params.append('run_date=%s' % run_date)
    params.append('zip_fpath=%s' % zip_fpath)
    params.append('zip_fpath_db=%s' % zip_fpath_db)
    job_input='!!'.join(params)
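    # job_input packs all parameters into a single '!!'-delimited string of
    # key=value pairs, e.g. 'fs_fp=/some/path!!web_fp=/some/other/path!!...'
    # (illustrative values), which the receiving job handler can split back
    # apart on '!!'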
    
    # Determine which meta-analyses the user selected 
    analyses_to_start=jobs_to_start.split(',')
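    # e.g. jobs_to_start='showTE,bdiv' (illustrative) -> ['showTE', 'bdiv']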
    
    # Prepare TopiaryExplorer job
    if 'showTE' in analyses_to_start:
        tree_fpath=path.abspath('%s/software/gg_otus_4feb2011/trees/gg_97_otus_4feb2011.tre' % (os.environ['HOME']))
        python_exe_fp = qiime_config['python_exe_fp']
        commands=[]
        command_handler=call_commands_serially
        status_update_callback=no_status_updates
        logger = WorkflowLogger(generate_log_fp('/tmp/'),
                                params={},
                                qiime_config=qiime_config)
        
        #define topiary explorer fpaths
        jnlp_fname=path.splitext(path.split(otu_table_filepath)[-1])[0]+'.jnlp'
        tep_fname=path.splitext(path.split(otu_table_filepath)[-1])[0] + '.tep'
        jnlp_filepath_web=path.join(web_fp, 'topiaryexplorer_files', jnlp_fname)
        jnlp_filepath_web_tep=path.join(web_fp,'topiaryexplorer_files', 
                                        tep_fname)
        
        # pick the host name (dev vs. production) used to build the JNLP URL
        if ServerConfig.home=='/home/wwwdevuser/':
            host_name='http://webdev.microbio.me/qiime'
        else:
            host_name='http://www.microbio.me/qiime'
            
        jnlp_filepath_web_tep_url=path.join(host_name, jnlp_filepath_web_tep)
        output_dir=os.path.join(fs_fp, 'topiaryexplorer_files')
        
        #build command
        make_tep_cmd='%s %s/make_tep.py -i %s -m %s -t %s -o %s -u %s -w' %\
        (python_exe_fp, script_dir, otu_table_filepath, map_filepath, 
         tree_fpath, output_dir, jnlp_filepath_web_tep_url)
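        # the assembled command looks roughly like (illustrative paths):
        #   python <script_dir>/make_tep.py -i <otu_table> -m <mapping_file>
        #     -t gg_97_otus_4feb2011.tre -o <fs_fp>/topiaryexplorer_files
        #     -u <host_name>/<web_fp>/topiaryexplorer_files/<prefix>.tep -w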
        
        commands.append([('Make TopiaryExplorer jnlp', make_tep_cmd)])

        # Call the command handler on the list of commands
        command_handler(commands, status_update_callback, logger)
        
        #zip Topiary Explorer jnlp file
        cmd_call='cd %s; zip %s %s' % (output_dir,zip_fpath,jnlp_fname)
        system(cmd_call)
        
        #zip Topiary Explorer project file
        cmd_call='cd %s; zip %s %s' % (output_dir,zip_fpath,tep_fname)
        system(cmd_call)
        
        valid=data_access.addMetaAnalysisFiles(True, int(meta_id), 
                                               jnlp_filepath_web, 'OTUTABLE', 
                                               run_date, 'TOPIARYEXPLORER')
        if not valid:
            raise ValueError, 'There was an issue uploading the filepaths to the DB!'
            
    # Generate and Submit Beta-Diversity Job
    if 'bdiv' in analyses_to_start:
        job_type='betaDiversityThroughPlots'

        # Submit the Beta Diversity jobs
        try:
            # Attempt the submission
            submitQiimeJob(meta_id, user_id, job_type, job_input, data_access)
        
        except Exception, e:
            raise ValueError,e

def main():
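    """Load a study's sequence data into the database.

    Chooses the production or test database, submits the platform-specific
    sequence data (SFF, Illumina, or FASTA), loads the OTU table for
    non-metagenomic studies, and queues a LoadSplitLibSeqsHandler job.
    """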
    option_parser, opts, args =\
       parse_command_line_parameters(**script_info)

    
    submit_to_test_db=opts.submit_to_test_db
    fasta_file_paths=opts.fasta_file_paths
    study_id=opts.study_id
    output_dir=opts.output_dir
    platform=opts.platform
    user_id=opts.user_id
    
    if submit_to_test_db == 'False':
        # Load the data into the production database
        data_access = data_access_factory(ServerConfig.data_access_type)
    else:
        # Load the data into the test database
        data_access = data_access_factory(DataAccessType.qiime_test)

    # Get all of the fasta files
    if (platform=='FLX' or platform=='TITANIUM'):
        print 'Submitting SFF data to database...'
        analysis_id, input_dir, seq_run_id, split_lib_input_md5sum = \
            submit_sff_and_split_lib(data_access, fasta_file_paths, study_id)
    elif platform=='ILLUMINA':
        print 'Submitting Illumina data to database...'
        analysis_id, input_dir, seq_run_id, split_lib_input_md5sum = \
            submit_illumina_and_split_lib(data_access, fasta_file_paths,
                                          study_id,output_dir)
    elif platform=='FASTA':
        print 'Submitting FASTA data to database...'
        analysis_id, input_dir, seq_run_id, split_lib_input_md5sum = \
            submit_fasta_and_split_lib(data_access, fasta_file_paths,
                                       study_id, output_dir)
    else:
        # guard against unrecognized platforms, which would otherwise leave
        # the variables above undefined
        raise ValueError, 'Unrecognized platform: %s' % platform
    
    '''
    # Submit Split-library loading job
    split_library_id=load_split_lib_sequences(data_access,input_dir,
                                              analysis_id, seq_run_id,
                                              split_lib_input_md5sum)
    '''
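    # NOTE: the split-library loading step above is no longer called inline;
    # it is queued as a 'LoadSplitLibSeqsHandler' job at the end of main()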

    # skip OTU loading for metagenomic sequences,
    # otherwise load the OTU table
    study_info=data_access.getStudyInfo(study_id,user_id)
    if study_info['investigation_type'].lower() != 'metagenome':
        print 'Submitting OTU data to database...'
        load_otu_mapping(data_access, output_dir, analysis_id)
    

    params=[]
    params.append('OutputDir=%s' % output_dir)
    params.append('UserId=%s' % user_id)
    params.append('StudyId=%s' % study_id)
    params.append('TestDB=%s' % submit_to_test_db)
    params.append('ProcessedFastaFilepath=%s' % (fasta_file_paths))
    params.append('AnalysisId=%s' % analysis_id)
    params.append('SeqRunId=%s' % seq_run_id)
    params.append('MDchecksum=%s' % split_lib_input_md5sum)
    job_input='!!'.join(params)
    job_type='LoadSplitLibSeqsHandler'


    submitQiimeJob(study_id, user_id, job_type, job_input, data_access)
    
    print 'Completed database loading.'