Code example #1
File: meta.py Project: BinglanLi/galaxy
def __expand_collection_parameter( trans, input_key, incoming_val, collections_to_match ):
    # If this is a subcollection multirun of a data_collection param, the value
    # will be "hdca_id|subcollection_type"; otherwise it is just the hdca_id.
    if "|" in incoming_val:
        encoded_hdc_id, subcollection_type = incoming_val.split( "|", 1 )
    else:
        try:
            src = incoming_val[ "src" ]
            if src != "hdca":
                raise exceptions.ToolMetaParameterException( "Invalid dataset collection source type %s" % src )
            encoded_hdc_id = incoming_val[ "id" ]
            subcollection_type = incoming_val.get( 'map_over_type', None )
        except TypeError:
            encoded_hdc_id = incoming_val
            subcollection_type = None
    hdc_id = trans.app.security.decode_id( encoded_hdc_id )
    hdc = trans.sa_session.query( model.HistoryDatasetCollectionAssociation ).get( hdc_id )
    collections_to_match.add( input_key, hdc, subcollection_type=subcollection_type )
    if subcollection_type is not None:
        from galaxy.dataset_collections import subcollections
        subcollection_elements = subcollections.split_dataset_collection_instance( hdc, subcollection_type )
        return subcollection_elements
    else:
        hdas = []
        for element in hdc.collection.dataset_elements:
            hda = element.dataset_instance
            hda.element_identifier = element.element_identifier
            hdas.append( hda )
        return hdas
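
The branching above accepts two shapes for incoming_val: a dict of the form {"src": "hdca", "id": ..., "map_over_type": ...}, or a string that is either a bare encoded HDCA id or "hdca_id|subcollection_type". The following standalone sketch only illustrates that parsing step, with no Galaxy imports and an explicit isinstance check in place of the try/except TypeError used above; parse_collection_value and the example ids are hypothetical.

def parse_collection_value(incoming_val):
    # Return (encoded_hdc_id, subcollection_type) for the supported formats.
    if isinstance(incoming_val, dict):
        # Dict form: {"src": "hdca", "id": "<encoded id>", "map_over_type": "paired"}
        if incoming_val["src"] != "hdca":
            raise ValueError("Invalid dataset collection source type %s" % incoming_val["src"])
        return incoming_val["id"], incoming_val.get("map_over_type", None)
    if "|" in incoming_val:
        # String form carrying a subcollection type: "hdca_id|subcollection_type"
        return tuple(incoming_val.split("|", 1))
    # Bare encoded id: no mapping over a subcollection level
    return incoming_val, None


print(parse_collection_value("f2db41e1fa331b3e|paired"))
# -> ('f2db41e1fa331b3e', 'paired')
print(parse_collection_value({"src": "hdca", "id": "f2db41e1fa331b3e"}))
# -> ('f2db41e1fa331b3e', None)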
Code example #2
File: meta.py Project: msGenDev/galaxy-central
def collection_classifier( input_key ):
    multirun_key = "%s|__collection_multirun__" % input_key
    if multirun_key in incoming:
        incoming_val = incoming[ multirun_key ]
        # If this is a subcollection multirun of a data_collection param, the value
        # will be "hdca_id|subcollection_type"; otherwise it is just the hdca_id.
        if "|" in incoming_val:
            encoded_hdc_id, subcollection_type = incoming_val.split( "|", 1 )
        else:
            try:
                src = incoming_val[ "src" ]
                if src != "hdca":
                    raise exceptions.ToolMetaParameterException( "Invalid dataset collection source type %s" % src )
                encoded_hdc_id = incoming_val[ "id" ]
            except TypeError:
                encoded_hdc_id = incoming_val
            subcollection_type = None
        hdc_id = trans.app.security.decode_id( encoded_hdc_id )
        hdc = trans.sa_session.query( model.HistoryDatasetCollectionAssociation ).get( hdc_id )
        collections_to_match.add( input_key, hdc, subcollection_type=subcollection_type )
        if subcollection_type is not None:
            from galaxy.dataset_collections import subcollections
            subcollection_elements = subcollections.split_dataset_collection_instance( hdc, subcollection_type )
            return permutations.input_classification.MATCHED, subcollection_elements
        else:
            hdas = hdc.collection.dataset_instances
            return permutations.input_classification.MATCHED, hdas
    else:
        return permutations.input_classification.SINGLE, incoming[ input_key ]
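
For context, collection_classifier returns a (classification, values) pair for each input key: SINGLE inputs keep their single value, while MATCHED inputs contribute one element per expanded job. The sketch below is a self-contained illustration of how such classifications could be turned into per-job parameter dicts; the SINGLE/MATCHED constants and the expand_inputs helper are hypothetical stand-ins, not Galaxy's permutations module, and the multirun key naming is abstracted away.

SINGLE = "single"
MATCHED = "matched"


def expand_inputs(incoming, classifier):
    # Hypothetical expansion: MATCHED inputs are assumed to hold lists of equal
    # length; SINGLE inputs are copied into every expanded parameter set.
    singles, matched = {}, {}
    for key in incoming:
        classification, value = classifier(key)
        if classification == MATCHED:
            matched[key] = value
        else:
            singles[key] = value
    if not matched:
        return [singles]
    num_jobs = len(next(iter(matched.values())))
    expanded = []
    for i in range(num_jobs):
        params = dict(singles)
        for key, values in matched.items():
            params[key] = values[i]
        expanded.append(params)
    return expanded


incoming = {"threshold": 5, "input1": ["hda_a", "hda_b"]}


def classifier(key):
    if key == "input1":
        return MATCHED, incoming[key]
    return SINGLE, incoming[key]


print(expand_inputs(incoming, classifier))
# -> [{'threshold': 5, 'input1': 'hda_a'}, {'threshold': 5, 'input1': 'hda_b'}]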
Code example #3
File: meta.py Project: roalva1/galaxy
def __expand_collection_parameter(trans, input_key, incoming_val,
                                  collections_to_match):
    # If this is a subcollection multirun of a data_collection param, the value
    # will be "hdca_id|subcollection_type"; otherwise it is just the hdca_id.
    if "|" in incoming_val:
        encoded_hdc_id, subcollection_type = incoming_val.split("|", 1)
    else:
        try:
            src = incoming_val["src"]
            if src != "hdca":
                raise exceptions.ToolMetaParameterException(
                    "Invalid dataset collection source type %s" % src)
            encoded_hdc_id = incoming_val["id"]
            subcollection_type = incoming_val.get('map_over_type', None)
        except TypeError:
            encoded_hdc_id = incoming_val
            subcollection_type = None
    hdc_id = trans.app.security.decode_id(encoded_hdc_id)
    hdc = trans.sa_session.query(
        model.HistoryDatasetCollectionAssociation).get(hdc_id)
    collections_to_match.add(input_key,
                             hdc,
                             subcollection_type=subcollection_type)
    if subcollection_type is not None:
        from galaxy.dataset_collections import subcollections
        subcollection_elements = subcollections.split_dataset_collection_instance(
            hdc, subcollection_type)
        return subcollection_elements
    else:
        hdas = hdc.collection.dataset_instances
        return hdas
Code example #4
File: meta.py Project: hidelab/galaxy-central-hpc
def collection_classifier(input_key):
    multirun_key = "%s|__collection_multirun__" % input_key
    if multirun_key in incoming:
        incoming_val = incoming[multirun_key]
        # If this is a subcollection multirun of a data_collection param, the value
        # will be "hdca_id|subcollection_type"; otherwise it is just the hdca_id.
        if "|" in incoming_val:
            encoded_hdc_id, subcollection_type = incoming_val.split("|", 1)
        else:
            try:
                src = incoming_val["src"]
                if src != "hdca":
                    raise exceptions.ToolMetaParameterException(
                        "Invalid dataset collection source type %s" % src)
                encoded_hdc_id = incoming_val["id"]
            except TypeError:
                encoded_hdc_id = incoming_val
            subcollection_type = None
        hdc_id = trans.app.security.decode_id(encoded_hdc_id)
        hdc = trans.sa_session.query(
            model.HistoryDatasetCollectionAssociation).get(hdc_id)
        collections_to_match.add(input_key,
                                 hdc,
                                 subcollection_type=subcollection_type)
        if subcollection_type is not None:
            from galaxy.dataset_collections import subcollections
            subcollection_elements = subcollections.split_dataset_collection_instance(
                hdc, subcollection_type)
            return permutations.input_classification.MATCHED, subcollection_elements
        else:
            hdas = hdc.collection.dataset_instances
            return permutations.input_classification.MATCHED, hdas
    else:
        return permutations.input_classification.SINGLE, incoming[input_key]
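
All four snippets register the decoded HDCA with collections_to_match.add(input_key, hdc, subcollection_type=subcollection_type) before deciding what to return. The minimal accumulator below only mirrors that add() signature to make the call site concrete; it is a hypothetical stand-in, not Galaxy's actual matching structure.

class CollectionsToMatch(object):
    # Hypothetical stand-in: remembers which HDCA (and optional subcollection
    # level) each tool input should be mapped over.
    def __init__(self):
        self.collections = {}

    def add(self, input_key, hdca, subcollection_type=None):
        self.collections[input_key] = (hdca, subcollection_type)

    def has_collections(self):
        return len(self.collections) > 0


collections_to_match = CollectionsToMatch()
collections_to_match.add("input1", "<hdca placeholder>", subcollection_type="paired")
print(collections_to_match.has_collections())  # True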