Code Example #1
# Example script to execute a NIST Workflow

from gov.nasa.jpl.edrn.labcas.labcas_client import LabcasClient

if __name__ == '__main__':
    
    # submit workflow
    # ./wmgr-client --url http://localhost:9001 --operation --sendEvent --eventName nist 
    #               --metaData --key DatasetName Lab005_C_R03 --key LabNumber 005 
    #               --key NormalizationMethod C --key RoundNumber 003 --key LeadPI Johns 
    #               --key DataCollectionDate 20160101 --key SampleProcessingProtocols 'With water and ammonia' 
    #               --key InstrumentationTechnologyCode NGS --key Manufacturer TexasInstruments 
    #               --key ModelNumber XYZ123 --key DataProcessingProtocols 'Crunching data' 
    #               --key OwnerGroup Lab005_OwnerPrincipal
    labcasClient = LabcasClient()

    workflowTasks = ['urn:edrn:NistInitTask',
                     'urn:edrn:NistConvertTask',
                     'urn:edrn:NistExecTask',
                     'urn:edrn:NistCrawlTask']
    
    metadata = {'DatasetName':'Lab005_C_R03',
                'LabNumber':'005',
                'Method':'C',
                'RoundNumber':'003',
                'LeadPI':'Johns', 
                'DataCollectionDate':'20160101',
                'NewVersion':'false' }

    # upload dataset without changing the version
    wInstId = labcasClient.executeWorkflow(workflowTasks, metadata)
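
A small optional follow-up, reusing the waitForCompletion call demonstrated in the later examples, to block until the submitted NIST workflow finishes:

    # monitor the workflow instance until it completes
    labcasClient.waitForCompletion(wInstId)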
Code Example #2
# Python script for programmatic execution of the AF workflow
# (and consequent publishing of UnivColoLungImages)

from gov.nasa.jpl.edrn.labcas.labcas_client import LabcasClient

if __name__ == '__main__':

    # upload these datasets    
    #dataset_names = ['UCHSC_1467', 'UCHSC_8798']
    #dataset_names = ['UCHSC_1467']
    dataset_names = ['UCHSC_8798']
    product_type = 'University_of_Colorado_Lung_Image'
    
    for dataset_name in dataset_names:
    
        # submit workflow
        # ./wmgr-client --url http://localhost:9001 --operation --sendEvent --eventName waf --metaData --key DatasetId UCHSC_1001
        labcasClient = LabcasClient()
        wInstId = labcasClient.executeWorkflow(['urn:edrn:WafInitTask',
                                                'urn:edrn:WafCrawlTask'], 
                                               { 'DatasetName':dataset_name },
                                               )
        # monitor workflow instance
        labcasClient.waitForCompletion(wInstId)
        
        # list all products for given product type
        labcasClient.listProducts(product_type)
Code Example #3
# Example script to execute the "labcas-test" workflow

from gov.nasa.jpl.edrn.labcas.labcas_client import LabcasClient

if __name__ == '__main__':
    
    # submit workflow
    # ./wmgr-client --url http://localhost:9001 --operation --sendEvent --eventName labcas-test --metaData --key experiment 11 --key species snakes
    labcasClient = LabcasClient()
    wInstId = labcasClient.executeWorkflow(['urn:edrn:LabcasTestInit','urn:edrn:LabcasTestTask'], 
                                           {'experiment':'11', 
                                            'species':'snakes' } )

    # monitor workflow instance
    labcasClient.waitForCompletion(wInstId)

    # list all files of this product type
    labcasClient.listProducts('LabCAS_Test_Product')
Code Example #4
from gov.nasa.jpl.edrn.labcas.labcas_client import LabcasClient

if __name__ == '__main__':
    
    # required input metadata    
    collection_name = 'My Data Collection'
    collection_description = 'This is my precious data collection'
    dataset_name = 'Best Dataset'
    dataset_description = 'The Best Dataset of this collection'
    owner_principal = 'uid=testuser,dc=edrn,dc=jpl,dc=nasa,dc=gov'
    
    # NOTE: data must be uploaded to directory $LABCAS_STAGING/<product_type>/<dataset_id>
    product_type = collection_name.replace(' ','_')
    #dataset_id = dataset_name.replace(' ','_') 
    labcasClient = LabcasClient()
    
    # print out workflow definition
    #labcasClient.getWorkflowsByEvent("labcas-upload")
    # or equivalently
    labcasClient.getWorkflowById("urn:edrn:LabcasUploadWorkflow")
    
    # required metadata fields
    #./wmgr-client --url http://localhost:9001 --operation --sendEvent --eventName labcas-upload --metaData 
    # --key DatasetId mydata --key ProductType 'MyData' --key Description 'My own data' 
    # --key ProtocolId 1 --key LeadPI 'John Doe' --key ProtocolName 'GSTP1 Methylation' --key OrganSite Lung --key OwnerPrincipal EDRN_CANCER_GROUP
    # --key DataCustodian 'Rich Smith' --key DataCustodianEmail '*****@*****.**' --key CollaborativeGroup 'Prostate and Urologic'
    metadata = { # required metadata
                 'CollectionName':collection_name,
                 'CollectionDescription':collection_description,
                 'DatasetName': dataset_name,
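                 # assumed completion: the remaining fields below map the other
                 # variables defined at the top of this script
                 'DatasetDescription': dataset_description,
                 'OwnerPrincipal': owner_principal,
    }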
Code Example #5
File: upload_hanash.py  Project: EDRN/labcas-backend
# Example Python script to upload Hanash data

from gov.nasa.jpl.edrn.labcas.labcas_client import LabcasClient

if __name__ == '__main__':
        
    # datasetId must match the directory name where the data is staged on the server: $LABCAS_STAGING/$datasetId
    datasetId = 'FHCRCHanashAnnexinLamr'
    labcasClient = LabcasClient()
        
    # product type metadata (to be submitted as part of upload workflow)
    metadata = { 
                 # required
                 'DatasetName':'Autoantibody Biomarkers',
                 'ProtocolId':'138',
                 'ProtocolName':'Validation of Protein Markers for Lung Cancer Using CARET Sera and Proteomics Techniques',
                 'LeadPI':'Samir Hanash',
                 'DataCustodian':'Ji Qiu',
                 'DataCustodianEmail':'*****@*****.**',
                 'CollaborativeGroup':'Lung and Upper Aerodigestive',
                 'OwnerPrincipal':'/Samir/Hanash',
                 # optional
                 'OrganSite':'Lung',
                 'SiteName':'Fred Hutchinson Cancer Research Center (Biomarker Developmental Laboratories)',
                 'SiteShortName':'FHCRC',
                 'QAState':'Accepted',
                 'PubMedId':'http://www.ncbi.nlm.nih.gov/pubmed/18794547',
                 'DateDatasetFrozen':'2007/05/29',
    }
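
The snippet stops after building the metadata dictionary. A minimal sketch of submitting it, reusing the calls from the earlier examples; the upload task URNs below are assumptions for illustration, so confirm the actual task list first (for example via labcasClient.getWorkflowsByEvent("labcas-upload"), as shown in Code Example #4):

    # submit the upload workflow with the metadata above
    # NOTE: these task URNs are assumed placeholders, not confirmed identifiers
    workflowTasks = ['urn:edrn:LabcasUploadInitTask',
                     'urn:edrn:LabcasUploadExecuteTask']
    wInstId = labcasClient.executeWorkflow(workflowTasks, metadata)

    # monitor the workflow instance until it completes
    labcasClient.waitForCompletion(wInstId)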