Example #1
def expand_meta_parameters(trans, tool, incoming):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """

    to_remove = []
    for key in incoming.keys():
        if key.endswith("|__identifier__"):
            to_remove.append(key)
    for key in to_remove:
        incoming.pop(key)

    def classifier(input_key):
        value = incoming[input_key]
        if isinstance(value, dict) and 'values' in value:
            # Explicit meta wrapper for inputs...
            is_batch = value.get('batch', False)
            is_linked = value.get('linked', True)
            if is_batch and is_linked:
                classification = permutations.input_classification.MATCHED
            elif is_batch:
                classification = permutations.input_classification.MULTIPLIED
            else:
                classification = permutations.input_classification.SINGLE
            if __collection_multirun_parameter(value):
                collection_value = value['values'][0]
                values = __expand_collection_parameter(trans,
                                                       input_key,
                                                       collection_value,
                                                       collections_to_match,
                                                       linked=is_linked)
            else:
                values = value['values']
        else:
            classification = permutations.input_classification.SINGLE
            values = value
        return classification, values

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = incoming.copy()

    expanded_incomings = permutations.expand_multi_inputs(
        incoming_template, classifier)
    if collections_to_match.has_collections():
        collection_info = trans.app.dataset_collections_service.match_collections(
            collections_to_match)
    else:
        collection_info = None
    return expanded_incomings, collection_info
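
The classifier above encodes the 'batch'/'linked' meta wrapper semantics: batch+linked inputs are zipped across executions (MATCHED), batch-only inputs are crossed (MULTIPLIED), and everything else passes through unchanged (SINGLE). The following is a minimal, self-contained sketch of those semantics; toy_expand is a hypothetical stand-in for permutations.expand_multi_inputs, not the Galaxy implementation.

import itertools

def toy_expand(incoming):
    # Partition inputs the same way classifier() does.
    matched, multiplied, single = {}, {}, {}
    for key, value in incoming.items():
        if isinstance(value, dict) and 'values' in value:
            if value.get('batch', False) and value.get('linked', True):
                matched[key] = value['values']      # zipped across executions
            elif value.get('batch', False):
                multiplied[key] = value['values']   # crossed across executions
            else:
                single[key] = value['values']
        else:
            single[key] = value
    # One base parameter set per linked index, crossed with every
    # combination of the multiplied values.
    n = len(next(iter(matched.values()), [None]))
    expanded = []
    for i in range(n):
        base = dict(single)
        base.update({k: v[i] for k, v in matched.items()})
        if multiplied:
            for combo in itertools.product(*multiplied.values()):
                job = dict(base)
                job.update(zip(multiplied.keys(), combo))
                expanded.append(job)
        else:
            expanded.append(base)
    return expanded

# toy_expand({'input1': {'batch': True, 'values': ['a', 'b']}, 'threshold': 5})
# -> [{'threshold': 5, 'input1': 'a'}, {'threshold': 5, 'input1': 'b'}]
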
Example #2
def expand_meta_parameters( trans, tool, incoming ):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """

    to_remove = []
    for key in incoming.keys():
        if key.endswith("|__identifier__"):
            to_remove.append(key)
    for key in to_remove:
        incoming.pop(key)

    def classifier( input_key ):
        value = incoming[ input_key ]
        if isinstance( value, dict ) and 'values' in value:
            # Explicit meta wrapper for inputs...
            is_batch = value.get( 'batch', False )
            is_linked = value.get( 'linked', True )
            if is_batch and is_linked:
                classification = permutations.input_classification.MATCHED
            elif is_batch:
                classification = permutations.input_classification.MULTIPLIED
            else:
                classification = permutations.input_classification.SINGLE
            if __collection_multirun_parameter( value ):
                collection_value = value[ 'values' ][ 0 ]
                values = __expand_collection_parameter( trans, input_key, collection_value, collections_to_match, linked=is_linked )
            else:
                values = value[ 'values' ]
        else:
            classification = permutations.input_classification.SINGLE
            values = value
        return classification, values

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = incoming.copy()

    expanded_incomings = permutations.expand_multi_inputs( incoming_template, classifier )
    if collections_to_match.has_collections():
        collection_info = trans.app.dataset_collections_service.match_collections( collections_to_match )
    else:
        collection_info = None
    return expanded_incomings, collection_info
Example #3
def expand_meta_parameters(trans, tool, incoming):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """

    for key in list(incoming.keys()):
        if key.endswith("|__identifier__"):
            incoming.pop(key)

    # If we're going to multiply input dataset combinations,
    # order matters, so the following reorders incoming
    # according to tool.inputs (which is ordered).
    incoming_copy = incoming.copy()
    nested_dict = {}
    for incoming_key, incoming_value in incoming_copy.items():
        if not incoming_key.startswith('__'):
            process_key(incoming_key,
                        incoming_value=incoming_value,
                        d=nested_dict)

    reordered_incoming = OrderedDict()

    def visitor(input, value, prefix, prefixed_name, prefixed_label, error,
                **kwargs):
        if prefixed_name in incoming_copy:
            reordered_incoming[prefixed_name] = incoming_copy[prefixed_name]
            del incoming_copy[prefixed_name]

    visit_input_values(inputs=tool.inputs,
                       input_values=nested_dict,
                       callback=visitor)
    reordered_incoming.update(incoming_copy)

    def classifier(input_key):
        value = incoming[input_key]
        if isinstance(value, dict) and 'values' in value:
            # Explicit meta wrapper for inputs...
            is_batch = value.get('batch', False)
            is_linked = value.get('linked', True)
            if is_batch and is_linked:
                classification = permutations.input_classification.MATCHED
            elif is_batch:
                classification = permutations.input_classification.MULTIPLIED
            else:
                classification = permutations.input_classification.SINGLE
            if __collection_multirun_parameter(value):
                collection_value = value['values'][0]
                values = __expand_collection_parameter(trans,
                                                       input_key,
                                                       collection_value,
                                                       collections_to_match,
                                                       linked=is_linked)
            else:
                values = value['values']
        else:
            classification = permutations.input_classification.SINGLE
            values = value
        return classification, values

    collections_to_match = matching.CollectionsToMatch()

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = reordered_incoming

    expanded_incomings = permutations.expand_multi_inputs(
        incoming_template, classifier)
    if collections_to_match.has_collections():
        collection_info = trans.app.dataset_collections_service.match_collections(
            collections_to_match)
    else:
        collection_info = None
    return expanded_incomings, collection_info
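
This revision adds the reordering pass (process_key plus visit_input_values) because dictionary order determines the order in which MULTIPLIED inputs are crossed, and therefore the order of the resulting jobs. A minimal illustration, independent of Galaxy, of how key order changes cross-product order:

from collections import OrderedDict
from itertools import product

def cross(params):
    # Cross all value lists in the order their keys appear.
    keys = list(params)
    return [dict(zip(keys, combo)) for combo in product(*params.values())]

a_first = OrderedDict([('a', [1, 2]), ('b', ['x', 'y'])])
b_first = OrderedDict([('b', ['x', 'y']), ('a', [1, 2])])
# cross(a_first)[1] == {'a': 1, 'b': 'y'}
# cross(b_first)[1] == {'b': 'x', 'a': 2}  (same combinations, different job order)
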
Example #4
def expand_meta_parameters( trans, tool, incoming ):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """

    to_remove = []
    for key in incoming.keys():
        if key.endswith("|__identifier__"):
            to_remove.append(key)
    for key in to_remove:
        incoming.pop(key)

    def classify_unmodified_parameter( input_key ):
        value = incoming[ input_key ]
        if isinstance( value, dict ) and 'values' in value:
            # Explicit meta wrapper for inputs...
            is_batch = value.get( 'batch', False )
            is_linked = value.get( 'linked', True )
            if is_batch and is_linked:
                classification = permutations.input_classification.MATCHED
            elif is_batch:
                classification = permutations.input_classification.MULTIPLIED
            else:
                classification = permutations.input_classification.SINGLE
            if __collection_multirun_parameter( value ):
                collection_value = value[ 'values' ][ 0 ]
                values = __expand_collection_parameter( trans, input_key, collection_value, collections_to_match )
            else:
                values = value[ 'values' ]
        else:
            classification = permutations.input_classification.SINGLE
            values = value
        return classification, values

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    def classifier( input_key ):
        collection_multirun_key = "%s|__collection_multirun__" % input_key
        multirun_key = "%s|__multirun__" % input_key
        if multirun_key in incoming:
            multi_value = util.listify( incoming[ multirun_key ] )
            if len( multi_value ) > 1:
                return permutations.input_classification.MATCHED, multi_value
            elif len( multi_value ) == 0:
                # An empty multirun value maps to a single None parameter.
                return permutations.input_classification.SINGLE, None
            else:
                return permutations.input_classification.SINGLE, multi_value[ 0 ]
        elif collection_multirun_key in incoming:
            incoming_val = incoming[ collection_multirun_key ]
            values = __expand_collection_parameter( trans, input_key, incoming_val, collections_to_match )
            return permutations.input_classification.MATCHED, values
        else:
            return classify_unmodified_parameter( input_key )

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = incoming.copy()

    # We'll reuse this in subsequent work, so it's designed this way now...
    def try_replace_key( key, suffix ):
        found = key.endswith( suffix )
        if found:
            simple_key = key[ 0:-len( suffix ) ]
            if simple_key not in incoming_template:
                incoming_template[ simple_key ] = None
        return found

    multirun_found = False
    collection_multirun_found = False
    for key, value in incoming.items():
        if isinstance( value, dict ) and 'values' in value:
            batch = value.get( 'batch', False )
            if batch:
                if __collection_multirun_parameter( value ):
                    collection_multirun_found = True
                else:
                    multirun_found = True
            else:
                continue
        else:
            # Old-style batching (remove someday? - pretty hacky and didn't live in the API long)
            multirun_found = try_replace_key( key, "|__multirun__" ) or multirun_found
            collection_multirun_found = try_replace_key( key, "|__collection_multirun__" ) or collection_multirun_found

    expanded_incomings = permutations.expand_multi_inputs( incoming_template, classifier )
    if collections_to_match.has_collections():
        collection_info = trans.app.dataset_collections_service.match_collections( collections_to_match )
    else:
        collection_info = None
    return expanded_incomings, collection_info
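
The try_replace_key pass above supports the old-style suffix convention: a batched value arrives under "<param>|__multirun__", and the template needs a placeholder under the plain key for expand_multi_inputs to fill in. A standalone sketch of just that seeding step (the parameter names are made up):

incoming = {'input1|__multirun__': ['hda1', 'hda2'], 'threshold': 5}
incoming_template = incoming.copy()

suffix = '|__multirun__'
for key in list(incoming):
    if key.endswith(suffix):
        # Seed the plain key so the expansion step has a slot to replace.
        incoming_template.setdefault(key[:-len(suffix)], None)

# incoming_template == {'input1|__multirun__': ['hda1', 'hda2'],
#                       'threshold': 5, 'input1': None}
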
Example #5
def expand_meta_parameters(trans, tool, incoming):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """
    def classify_unmodified_parameter(input_key):
        value = incoming[input_key]
        if isinstance(value, dict) and 'values' in value:
            # Explicit meta wrapper for inputs...
            is_batch = value.get('batch', False)
            is_linked = value.get('linked', True)
            if is_batch and is_linked:
                classification = permutations.input_classification.MATCHED
            elif is_batch:
                classification = permutations.input_classification.MULTIPLIED
            else:
                classification = permutations.input_classification.SINGLE
            if __collection_multirun_parameter(value):
                collection_value = value['values'][0]
                values = __expand_collection_parameter(trans, input_key,
                                                       collection_value,
                                                       collections_to_match)
            else:
                values = value['values']
        else:
            classification = permutations.input_classification.SINGLE
            values = value
        return classification, values

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    def classifier(input_key):
        collection_multirun_key = "%s|__collection_multirun__" % input_key
        multirun_key = "%s|__multirun__" % input_key
        if multirun_key in incoming:
            multi_value = util.listify(incoming[multirun_key])
            if len(multi_value) > 1:
                return permutations.input_classification.MATCHED, multi_value
            elif len(multi_value) == 0:
                # An empty multirun value maps to a single None parameter.
                return permutations.input_classification.SINGLE, None
            else:
                return permutations.input_classification.SINGLE, multi_value[0]
        elif collection_multirun_key in incoming:
            incoming_val = incoming[collection_multirun_key]
            values = __expand_collection_parameter(trans, input_key,
                                                   incoming_val,
                                                   collections_to_match)
            return permutations.input_classification.MATCHED, values
        else:
            return classify_unmodified_parameter(input_key)

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = incoming.copy()

    # We'll reuse this in subsequent work, so it's designed this way now...
    def try_replace_key(key, suffix):
        found = key.endswith(suffix)
        if found:
            simple_key = key[0:-len(suffix)]
            if simple_key not in incoming_template:
                incoming_template[simple_key] = None
        return found

    multirun_found = False
    collection_multirun_found = False
    for key, value in incoming.items():
        if isinstance(value, dict) and 'values' in value:
            batch = value.get('batch', False)
            if batch:
                if __collection_multirun_parameter(value):
                    collection_multirun_found = True
                else:
                    multirun_found = True
            else:
                continue
        else:
            # Old-style batching (remove someday? - pretty hacky and didn't live in the API long)
            multirun_found = try_replace_key(key, "|__multirun__") or multirun_found
            collection_multirun_found = try_replace_key(
                key, "|__collection_multirun__") or collection_multirun_found

    expanded_incomings = permutations.expand_multi_inputs(
        incoming_template, classifier)
    if collections_to_match.has_collections():
        collection_info = trans.app.dataset_collections_service.match_collections(
            collections_to_match)
    else:
        collection_info = None
    return expanded_incomings, collection_info
Example #6
def expand_meta_parameters(trans, tool, incoming):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """

    to_remove = []
    for key in incoming.keys():
        if key.endswith("|__identifier__"):
            to_remove.append(key)
    for key in to_remove:
        incoming.pop(key)

    # If we're going to multiply input dataset combinations,
    # order matters, so the following reorders incoming
    # according to tool.inputs (which is ordered).
    incoming_copy = incoming.copy()
    nested_dict = {}
    for incoming_key, incoming_value in incoming_copy.items():
        if not incoming_key.startswith('__'):
            process_key(incoming_key, incoming_value=incoming_value, d=nested_dict)

    reordered_incoming = OrderedDict()

    def visitor(input, value, prefix, prefixed_name, prefixed_label, error, **kwargs):
        if prefixed_name in incoming_copy:
            reordered_incoming[prefixed_name] = incoming_copy[prefixed_name]
            del incoming_copy[prefixed_name]

    visit_input_values(inputs=tool.inputs, input_values=nested_dict, callback=visitor)
    reordered_incoming.update(incoming_copy)

    def classifier(input_key):
        value = incoming[input_key]
        if isinstance(value, dict) and 'values' in value:
            # Explicit meta wrapper for inputs...
            is_batch = value.get('batch', False)
            is_linked = value.get('linked', True)
            if is_batch and is_linked:
                classification = permutations.input_classification.MATCHED
            elif is_batch:
                classification = permutations.input_classification.MULTIPLIED
            else:
                classification = permutations.input_classification.SINGLE
            if __collection_multirun_parameter(value):
                collection_value = value['values'][0]
                values = __expand_collection_parameter(trans, input_key, collection_value, collections_to_match, linked=is_linked)
            else:
                values = value['values']
        else:
            classification = permutations.input_classification.SINGLE
            values = value
        return classification, values

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = reordered_incoming

    expanded_incomings = permutations.expand_multi_inputs(incoming_template, classifier)
    if collections_to_match.has_collections():
        collection_info = trans.app.dataset_collections_service.match_collections(collections_to_match)
    else:
        collection_info = None
    return expanded_incomings, collection_info
Example #7
def expand_meta_parameters( trans, tool, incoming ):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """

    def classifier( input_key ):
        multirun_key = "%s|__multirun__" % input_key
        if multirun_key in incoming:
            multi_value = util.listify( incoming[ multirun_key ] )
            if len( multi_value ) > 1:
                return permutations.input_classification.MATCHED, multi_value
            elif len( multi_value ) == 0:
                # An empty multirun value maps to a single None parameter.
                return permutations.input_classification.SINGLE, None
            else:
                return permutations.input_classification.SINGLE, multi_value[ 0 ]
        else:
            return permutations.input_classification.SINGLE, incoming[ input_key ]

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    def collection_classifier( input_key ):
        multirun_key = "%s|__collection_multirun__" % input_key
        if multirun_key in incoming:
            incoming_val = incoming[ multirun_key ]
            # For a subcollection multirun of a data_collection param, the
            # value will be "hdca_id|subcollection_type"; otherwise it is just
            # the hdca_id.
            if "|" in incoming_val:
                encoded_hdc_id, subcollection_type = incoming_val.split( "|", 1 )
            else:
                try:
                    src = incoming_val[ "src" ]
                    if src != "hdca":
                        raise exceptions.ToolMetaParameterException( "Invalid dataset collection source type %s" % src )
                    encoded_hdc_id = incoming_val[ "id" ]
                except TypeError:
                    encoded_hdc_id = incoming_val
                subcollection_type = None
            hdc_id = trans.app.security.decode_id( encoded_hdc_id )
            hdc = trans.sa_session.query( model.HistoryDatasetCollectionAssociation ).get( hdc_id )
            collections_to_match.add( input_key, hdc, subcollection_type=subcollection_type )
            if subcollection_type is not None:
                from galaxy.dataset_collections import subcollections
                subcollection_elements = subcollections.split_dataset_collection_instance( hdc, subcollection_type )
                return permutations.input_classification.MATCHED, subcollection_elements
            else:
                hdas = hdc.collection.dataset_instances
                return permutations.input_classification.MATCHED, hdas
        else:
            return permutations.input_classification.SINGLE, incoming[ input_key ]

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = incoming.copy()

    # We'll reuse this in subsequent work, so it's designed this way now...
    def try_replace_key( key, suffix ):
        found = key.endswith( suffix )
        if found:
            simple_key = key[ 0:-len( suffix ) ]
            if simple_key not in incoming_template:
                incoming_template[ simple_key ] = None
        return found

    multirun_found = False
    collection_multirun_found = False
    for key, value in incoming.items():
        multirun_found = try_replace_key( key, "|__multirun__" ) or multirun_found
        collection_multirun_found = try_replace_key( key, "|__collection_multirun__" ) or collection_multirun_found

    if multirun_found and collection_multirun_found:
        # In theory doable, but too complicated for a first pass.
        message = "Cannot specify parallel execution across both multiple datasets and dataset collections."
        raise exceptions.ToolMetaParameterException( message )

    if multirun_found:
        return permutations.expand_multi_inputs( incoming_template, classifier ), None
    else:
        expanded_incomings = permutations.expand_multi_inputs( incoming_template, collection_classifier )
        if collections_to_match.has_collections():
            collection_info = trans.app.dataset_collections_service.match_collections( collections_to_match )
        else:
            collection_info = None
        return expanded_incomings, collection_info
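
collection_classifier accepts three encodings for the "<param>|__collection_multirun__" value: a "hdca_id|subcollection_type" string, a {"src": "hdca", "id": ...} dictionary, or a bare encoded id. The helper below is a hypothetical, database-free restatement of just that parsing branch (it raises ValueError where the original raises ToolMetaParameterException):

def parse_collection_multirun(incoming_val):
    # Returns (encoded_hdc_id, subcollection_type or None).
    subcollection_type = None
    if isinstance(incoming_val, dict):
        if incoming_val.get('src') != 'hdca':
            raise ValueError("Invalid dataset collection source type %s"
                             % incoming_val.get('src'))
        encoded_hdc_id = incoming_val['id']
    elif '|' in incoming_val:
        # Subcollection multirun: "hdca_id|subcollection_type".
        encoded_hdc_id, subcollection_type = incoming_val.split('|', 1)
    else:
        encoded_hdc_id = incoming_val
    return encoded_hdc_id, subcollection_type

# parse_collection_multirun('c9468fdb6dc5c5f1|paired')
# -> ('c9468fdb6dc5c5f1', 'paired')
# parse_collection_multirun({'src': 'hdca', 'id': 'c9468fdb6dc5c5f1'})
# -> ('c9468fdb6dc5c5f1', None)
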
Example #8
def expand_meta_parameters(trans, tool, incoming):
    """
    Take in a dictionary of raw incoming parameters and expand it to a list
    of parameter dictionaries (one set of parameters per tool execution).
    """
    def classifier(input_key):
        multirun_key = "%s|__multirun__" % input_key
        if multirun_key in incoming:
            multi_value = util.listify(incoming[multirun_key])
            if len(multi_value) > 1:
                return permutations.input_classification.MATCHED, multi_value
            elif len(multi_value) == 0:
                # An empty multirun value maps to a single None parameter.
                return permutations.input_classification.SINGLE, None
            else:
                return permutations.input_classification.SINGLE, multi_value[0]
        else:
            return permutations.input_classification.SINGLE, incoming[
                input_key]

    from galaxy.dataset_collections import matching
    collections_to_match = matching.CollectionsToMatch()

    def collection_classifier(input_key):
        multirun_key = "%s|__collection_multirun__" % input_key
        if multirun_key in incoming:
            incoming_val = incoming[multirun_key]
            # For a subcollection multirun of a data_collection param, the
            # value will be "hdca_id|subcollection_type"; otherwise it is just
            # the hdca_id.
            if "|" in incoming_val:
                encoded_hdc_id, subcollection_type = incoming_val.split("|", 1)
            else:
                try:
                    src = incoming_val["src"]
                    if src != "hdca":
                        raise exceptions.ToolMetaParameterException(
                            "Invalid dataset collection source type %s" % src)
                    encoded_hdc_id = incoming_val["id"]
                except TypeError:
                    encoded_hdc_id = incoming_val
                subcollection_type = None
            hdc_id = trans.app.security.decode_id(encoded_hdc_id)
            hdc = trans.sa_session.query(
                model.HistoryDatasetCollectionAssociation).get(hdc_id)
            collections_to_match.add(input_key,
                                     hdc,
                                     subcollection_type=subcollection_type)
            if subcollection_type is not None:
                from galaxy.dataset_collections import subcollections
                subcollection_elements = subcollections.split_dataset_collection_instance(
                    hdc, subcollection_type)
                return permutations.input_classification.MATCHED, subcollection_elements
            else:
                hdas = hdc.collection.dataset_instances
                return permutations.input_classification.MATCHED, hdas
        else:
            return permutations.input_classification.SINGLE, incoming[
                input_key]

    # Stick in an unexpanded version of multirun keys so they can be replaced
    # by expand_multi_inputs.
    incoming_template = incoming.copy()

    # We'll reuse this in subsequent work, so it's designed this way now...
    def try_replace_key(key, suffix):
        found = key.endswith(suffix)
        if found:
            simple_key = key[0:-len(suffix)]
            if simple_key not in incoming_template:
                incoming_template[simple_key] = None
        return found

    multirun_found = False
    collection_multirun_found = False
    for key, value in incoming.items():
        multirun_found = try_replace_key(key,
                                         "|__multirun__") or multirun_found
        collection_multirun_found = try_replace_key(
            key, "|__collection_multirun__") or collection_multirun_found

    if multirun_found and collection_multirun_found:
        # In theory doable, but too complicated for a first pass.
        message = "Cannot specify parallel execution across both multiple datasets and dataset collections."
        raise exceptions.ToolMetaParameterException(message)

    if multirun_found:
        return permutations.expand_multi_inputs(incoming_template,
                                                classifier), None
    else:
        expanded_incomings = permutations.expand_multi_inputs(
            incoming_template, collection_classifier)
        if collections_to_match.has_collections():
            collection_info = trans.app.dataset_collections_service.match_collections(
                collections_to_match)
        else:
            collection_info = None
        return expanded_incomings, collection_info
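
Across all revisions, the contract of expand_meta_parameters stays the same: one fully concrete parameter dictionary per tool execution, plus optional matched-collection info. A hedged usage sketch against the legacy suffix API (trans and tool require a live Galaxy app, and the ids are illustrative):

# incoming = {'input1|__multirun__': ['hda1', 'hda2'], 'threshold': 5}
# expanded, collection_info = expand_meta_parameters(trans, tool, incoming)
#
# expanded holds one parameter dict per execution -- here one with 'input1'
# bound to each of the two datasets -- and collection_info carries matched
# collections (None when no collections are involved).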