Example #1
def _loader_creator(specification, **kwargs):
    if isinstance(specification, (tuple, list)):
        specification, options = specification
        if options:
            kwargs.update(options)
    if isinstance(specification, basestring):
        return MSFileLoader(specification, **kwargs)
    elif isinstance(specification, IonMobilitySourceRandomAccessFrameSource):
        return specification
    elif isinstance(specification, ScanIterator):
        return Generic3DIonMobilityFrameSource(specification, **kwargs)
    else:
        raise ValueError("Cannot determine how to get a ScanIterator from %r" %
                         (specification, ))
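A short usage sketch for the helper above; the file name and the use_index option are assumptions, not taken from the original project. A bare path is handed to MSFileLoader, a (specification, options) pair forwards extra keyword arguments, and anything that already iterates scans is wrapped as a frame source.

# Path string -> delegated to MSFileLoader (the file name is hypothetical).
reader = _loader_creator("example_data.mzML")

# (specification, options) pair -> the options dict is merged into kwargs;
# use_index is an assumed MSFileLoader keyword here.
reader = _loader_creator(("example_data.mzML", {"use_index": True}))

# An object that is already a ScanIterator is wrapped as a
# Generic3DIonMobilityFrameSource instead of being reloaded.
frames = _loader_creator(reader)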
Example #2
def preprocess(ms_file, outfile_path, averagine=None, start_time=None, end_time=None, maximum_charge=None,
               name=None, msn_averagine=None, score_threshold=35., msn_score_threshold=10., missed_peaks=1,
               msn_missed_peaks=1, background_reduction=5., msn_background_reduction=0.,
               transform=None, msn_transform=None, processes=4, extract_only_tandem_envelopes=False,
               ignore_msn=False, profile=False, isotopic_strictness=2.0, ms1_averaging=0,
               msn_isotopic_strictness=0.0, signal_to_noise_threshold=1.0, mass_offset=0.0, deconvolute=True):
    '''Convert raw mass spectra data into deisotoped neutral mass peak lists written to mzML.
    '''
    if transform is None:
        transform = []
    if msn_transform is None:
        msn_transform = []

    if (ignore_msn and extract_only_tandem_envelopes):
        click.secho(
            "Cannot use both --ignore-msn and --extract-only-tandem-envelopes",
            fg='red')
        raise click.Abort("Cannot use both --ignore-msn and --extract-only-tandem-envelopes")

    cache_handler_type = ThreadedMzMLScanCacheHandler
    click.echo("Preprocessing %s" % ms_file)
    minimum_charge = 1 if maximum_charge > 0 else -1
    charge_range = (minimum_charge, maximum_charge)

    loader = MSFileLoader(ms_file)
    # When the reader supports random access, resolve the requested time
    # window to MS1 scan ids and start iteration from the first of them.
    if isinstance(loader, RandomAccessScanSource):
        last_scan = loader[len(loader) - 1]
        last_time = last_scan.scan_time
        start_scan = loader._locate_ms1_scan(
            loader.get_scan_by_time(start_time))
        if end_time > last_time:
            end_time = last_time
        end_scan = loader._locate_ms1_scan(
            loader.get_scan_by_time(end_time))

        start_scan_id = start_scan.id
        end_scan_id = end_scan.id

        start_scan_time = start_scan.scan_time
        end_scan_time = end_scan.scan_time

        loader.reset()
        loader.start_from_scan(start_scan_id, grouped=True)
    else:
        click.secho("The file format provided does not support random"
                    " access, start and end points will be ignored", fg='yellow')
        start_scan_time = 0
        start_scan_id = None

        end_scan_time = float('inf')
        end_scan_id = None
        loader.make_iterator(grouped=True)

    first_bunch = next(loader)
    if first_bunch.precursor is not None:
        is_profile = (first_bunch.precursor.is_profile or profile)
    elif first_bunch.products:
        is_profile = (first_bunch.products[0].is_profile or profile)
    else:
        # Fall back to the explicit profile flag when the first bunch has no scans.
        is_profile = profile

    if is_profile:
        click.secho("Spectra are profile")
    else:
        click.secho("Spectra are centroided", fg='yellow')

    if name is None:
        name = os.path.splitext(os.path.basename(ms_file))[0]

    if os.path.exists(outfile_path) and not os.access(outfile_path, os.W_OK):
        click.secho("Can't write to output file path", fg='red')
        raise click.Abort()

    click.secho("Initializing %s" % name, fg='green')
    click.echo("from %s (%0.2f) to %s (%0.2f)" % (
        start_scan_id, start_scan_time, end_scan_id, end_scan_time))
    if deconvolute:
        click.echo("charge range: %s" % (charge_range,))

    if is_profile:
        ms1_peak_picking_args = {
            "transforms": list(transform),
            "signal_to_noise_threshold": signal_to_noise_threshold
        }
        if background_reduction:
            ms1_peak_picking_args['transforms'].append(
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=background_reduction, window_length=2))
            ms1_peak_picking_args['transforms'].append(ms_peak_picker.scan_filter.SavitskyGolayFilter())
    else:
        ms1_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=background_reduction, window_length=2),
            ] + list(transform)
        }

    if msn_background_reduction > 0.0:
        msn_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=msn_background_reduction, window_length=2),
            ] + list(msn_transform)
        }
    else:
        msn_peak_picking_args = {
            "transforms": list(msn_transform)
        }

    if mass_offset != 0.0:
        ms1_peak_picking_args['transforms'].append(
            ms_peak_picker.scan_filter.RecalibrateMass(offset=mass_offset))
        msn_peak_picking_args['transforms'].append(
            ms_peak_picker.scan_filter.RecalibrateMass(offset=mass_offset))

    if deconvolute:
        if len(averagine) == 1:
            averagine = averagine[0]
            ms1_deconvoluter_type = ms_deisotope.deconvolution.AveraginePeakDependenceGraphDeconvoluter
        else:
            ms1_deconvoluter_type = ms_deisotope.deconvolution.MultiAveraginePeakDependenceGraphDeconvoluter

        ms1_deconvolution_args = {
            "scorer": ms_deisotope.scoring.PenalizedMSDeconVFitter(score_threshold, isotopic_strictness),
            "max_missed_peaks": missed_peaks,
            "averagine": averagine,
            "truncate_after": SampleConsumer.MS1_ISOTOPIC_PATTERN_WIDTH,
            "ignore_below": SampleConsumer.MS1_IGNORE_BELOW,
            "deconvoluter_type": ms1_deconvoluter_type
        }

        if msn_isotopic_strictness >= 1:
            msn_isotopic_scorer = ms_deisotope.scoring.PenalizedMSDeconVFitter(
                msn_score_threshold, msn_isotopic_strictness)
        else:
            msn_isotopic_scorer = ms_deisotope.scoring.MSDeconVFitter(msn_score_threshold)

        msn_deconvolution_args = {
            "scorer": msn_isotopic_scorer,
            "averagine": msn_averagine,
            "max_missed_peaks": msn_missed_peaks,
            "truncate_after": SampleConsumer.MSN_ISOTOPIC_PATTERN_WIDTH,
            "ignore_below": SampleConsumer.MSN_IGNORE_BELOW
        }
    else:
        ms1_deconvolution_args = None
        msn_deconvolution_args = None

    consumer = SampleConsumer(
        ms_file,
        ms1_peak_picking_args=ms1_peak_picking_args,
        ms1_deconvolution_args=ms1_deconvolution_args,
        msn_peak_picking_args=msn_peak_picking_args,
        msn_deconvolution_args=msn_deconvolution_args,
        storage_path=outfile_path, sample_name=name,
        start_scan_id=start_scan_id, cache_handler_type=cache_handler_type,
        end_scan_id=end_scan_id, n_processes=processes,
        extract_only_tandem_envelopes=extract_only_tandem_envelopes,
        ignore_tandem_scans=ignore_msn,
        ms1_averaging=ms1_averaging,
        deconvolute=deconvolute)
    consumer.display_header()
    consumer.start()
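A minimal direct-invocation sketch for the function above; the paths, charge limit, and averagine models are assumptions, and in practice the function is normally driven by a click command line that supplies these values.

import ms_deisotope

preprocess(
    "example_raw.mzML",                  # hypothetical input file
    "example_deisotoped.mzML",           # hypothetical output path
    averagine=[ms_deisotope.peptide],    # one model -> AveraginePeakDependenceGraphDeconvoluter
    msn_averagine=ms_deisotope.peptide,
    maximum_charge=8,                    # positive mode, charge states 1 through 8
    start_time=0.0,
    end_time=float('inf'),
)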
Example #3
def deisotope(ms_file, outfile_path, averagine=None, start_time=None, end_time=None, maximum_charge=None,
              name=None, msn_averagine=None, score_threshold=35., msn_score_threshold=10., missed_peaks=1,
              msn_missed_peaks=1, background_reduction=0., msn_background_reduction=0.,
              transform=None, msn_transform=None, processes=4, extract_only_tandem_envelopes=False,
              ignore_msn=False, isotopic_strictness=2.0, ms1_averaging=0,
              msn_isotopic_strictness=0.0, signal_to_noise_threshold=1.0, mass_offset=0.0, deconvolute=True):
    '''Convert raw mass spectra data into deisotoped neutral mass peak lists written to mzML.
    '''
    if transform is None:
        transform = []
    if msn_transform is None:
        msn_transform = []

    if (ignore_msn and extract_only_tandem_envelopes):
        click.secho(
            "Cannot use both --ignore-msn and --extract-only-tandem-envelopes",
            fg='red')
        raise click.Abort("Cannot use both --ignore-msn and --extract-only-tandem-envelopes")

    cache_handler_type = workflow.ThreadedMzMLScanStorageHandler
    click.echo("Preprocessing %s" % ms_file)
    minimum_charge = 1 if maximum_charge > 0 else -1
    charge_range = (minimum_charge, maximum_charge)

    loader = MSFileLoader(ms_file)
    (start_scan_id, start_scan_time,
     end_scan_id, end_scan_time) = check_random_access(loader, start_time, end_time)

    is_profile = check_if_profile(loader)

    if name is None:
        name = os.path.splitext(os.path.basename(ms_file))[0]

    if os.path.exists(outfile_path) and not os.access(outfile_path, os.W_OK):
        click.secho("Can't write to output file path", fg='red')
        raise click.Abort()

    click.secho("Initializing %s" % name, fg='green')
    click.echo("from %s (%0.2f) to %s (%0.2f)" % (
        start_scan_id, start_scan_time, end_scan_id, end_scan_time))
    if deconvolute:
        click.echo("charge range: %s" % (charge_range,))

    if is_profile:
        ms1_peak_picking_args = {
            "transforms": list(transform),
            "signal_to_noise_threshold": signal_to_noise_threshold
        }
        if background_reduction:
            ms1_peak_picking_args['transforms'].append(
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=background_reduction, window_length=2))
            ms1_peak_picking_args['transforms'].append(ms_peak_picker.scan_filter.SavitskyGolayFilter())
    else:
        ms1_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=background_reduction, window_length=2),
            ] + list(transform)
        }

    if msn_background_reduction > 0.0:
        msn_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=msn_background_reduction, window_length=2),
            ] + list(msn_transform)
        }
    else:
        msn_peak_picking_args = {
            "transforms": list(msn_transform)
        }

    if mass_offset != 0.0:
        ms1_peak_picking_args['transforms'].append(
            ms_peak_picker.scan_filter.RecalibrateMass(offset=mass_offset))
        msn_peak_picking_args['transforms'].append(
            ms_peak_picker.scan_filter.RecalibrateMass(offset=mass_offset))

    if deconvolute:
        if len(averagine) == 1:
            averagine = averagine[0]
            ms1_deconvoluter_type = ms_deisotope.deconvolution.AveraginePeakDependenceGraphDeconvoluter
        else:
            ms1_deconvoluter_type = ms_deisotope.deconvolution.MultiAveraginePeakDependenceGraphDeconvoluter

        ms1_deconvolution_args = {
            "scorer": ms_deisotope.scoring.PenalizedMSDeconVFitter(score_threshold, isotopic_strictness),
            "max_missed_peaks": missed_peaks,
            "averagine": averagine,
            "truncate_after": workflow.SampleConsumer.MS1_ISOTOPIC_PATTERN_WIDTH,
            "ignore_below": workflow.SampleConsumer.MS1_IGNORE_BELOW,
            "deconvoluter_type": ms1_deconvoluter_type,
            "use_quick_charge": True
        }

        if msn_isotopic_strictness >= 1:
            msn_isotopic_scorer = ms_deisotope.scoring.PenalizedMSDeconVFitter(
                msn_score_threshold, msn_isotopic_strictness)
        else:
            msn_isotopic_scorer = ms_deisotope.scoring.MSDeconVFitter(msn_score_threshold)

        msn_deconvolution_args = {
            "scorer": msn_isotopic_scorer,
            "averagine": msn_averagine,
            "max_missed_peaks": msn_missed_peaks,
            "truncate_after": workflow.SampleConsumer.MSN_ISOTOPIC_PATTERN_WIDTH,
            "ignore_below": workflow.SampleConsumer.MSN_IGNORE_BELOW,
            "use_quick_charge": True
        }
    else:
        ms1_deconvolution_args = None
        msn_deconvolution_args = None

    consumer = workflow.SampleConsumer(
        ms_file,
        ms1_peak_picking_args=ms1_peak_picking_args,
        ms1_deconvolution_args=ms1_deconvolution_args,
        msn_peak_picking_args=msn_peak_picking_args,
        msn_deconvolution_args=msn_deconvolution_args,
        storage_path=outfile_path, sample_name=name,
        start_scan_id=start_scan_id, storage_type=cache_handler_type,
        end_scan_id=end_scan_id, n_processes=processes,
        extract_only_tandem_envelopes=extract_only_tandem_envelopes,
        ignore_tandem_scans=ignore_msn,
        ms1_averaging=ms1_averaging,
        deconvolute=deconvolute)
    consumer.start()
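The helpers check_random_access and check_if_profile used in this example are not shown; a minimal sketch consistent with the inlined logic of Example #2 (helper names taken from the call sites above, everything else assumed) might look like this.

def check_random_access(loader, start_time, end_time):
    # Mirror of Example #2: clamp the requested time window to the run,
    # resolve it to MS1 scan ids, and position the iterator, or fall back
    # to a plain grouped pass when random access is unavailable.
    if isinstance(loader, RandomAccessScanSource):
        last_time = loader[len(loader) - 1].scan_time
        start_scan = loader._locate_ms1_scan(loader.get_scan_by_time(start_time))
        if end_time > last_time:
            end_time = last_time
        end_scan = loader._locate_ms1_scan(loader.get_scan_by_time(end_time))
        loader.reset()
        loader.start_from_scan(start_scan.id, grouped=True)
        return start_scan.id, start_scan.scan_time, end_scan.id, end_scan.scan_time
    loader.make_iterator(grouped=True)
    return None, 0, None, float('inf')


def check_if_profile(loader):
    # Mirror of Example #2: peek at the first scan bunch to decide whether
    # the spectra are profile or centroided.
    first_bunch = next(loader)
    if first_bunch.precursor is not None:
        return first_bunch.precursor.is_profile
    if first_bunch.products:
        return first_bunch.products[0].is_profile
    return False  # assumed fallback for an empty first bunch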
Example #4
def rt_to_id(ms_file, rt):
    loader = MSFileLoader(ms_file)
    id = loader._locate_ms1_scan(loader.get_scan_by_time(rt)).id
    click.echo(id)
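For instance, printing the id of the MS1 scan nearest a retention time of 42.5 minutes (the file name is a placeholder), or wiring the function up as a click command (the command name and argument types are assumptions):

rt_to_id("example_data.mzML", 42.5)

@click.command("rt-to-id")
@click.argument("ms_file", type=click.Path(exists=True))
@click.argument("rt", type=float)
def rt_to_id_command(ms_file, rt):
    rt_to_id(ms_file, rt)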
Example #5
def get_scan(key, scan_id):
    path = key_index[key]
    reader, lock = reader_index[path]
    values = request.values
    print(values)
    with lock:
        scan = reader.get_scan_by_id(scan_id)
        response = format_scan(scan, values)
    return response


if __name__ == "__main__":
    import sys
    for i, path in enumerate(sys.argv[1:]):
        print("Loading {0} with Key {1}".format(path, i))
        reader = MSFileLoader(path)
        index_path = ExtendedScanIndex.index_file_name(path)
        if os.path.exists(index_path):
            with open(index_path, 'rt') as fh:
                file_index = ExtendedScanIndex.load(fh)
        else:
            print("Indexing {0}".format(path))
            reader.reset()
            file_index, scan_tree = quick_index.index(reader)
            reader.reset()
            with open(index_path, 'wt') as fh:
                file_index.dump(fh)
        print(file_index)
        metadata_index[path] = file_index
        reader_index[path] = reader, RLock()
        key_index[str(i)] = path
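The handler above relies on a web framework and module-level state that the excerpt does not show. A minimal sketch of that assumed preamble, using Flask for the request object and route registration (the route pattern, the format_scan helper, and the dictionary names are assumptions based on how they are used above):

from threading import RLock
from flask import Flask, request   # request.values as used in get_scan

app = Flask(__name__)

# Module-level state populated by the __main__ block above.
key_index = {}        # str(key) -> file path
reader_index = {}     # file path -> (reader, RLock())
metadata_index = {}   # file path -> ExtendedScanIndex

def format_scan(scan, values):
    # Placeholder formatter; the real helper is not shown in the excerpt.
    return {"id": scan.id, "time": scan.scan_time}

app.add_url_rule("/scan/<key>/<scan_id>", "get_scan", get_scan)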
Example #6
def preprocess(mzml_file, database_connection, averagine=None, start_time=None, end_time=None,
               maximum_charge=None, name=None, msn_averagine=None, score_threshold=35.,
               msn_score_threshold=5., missed_peaks=1, msn_missed_peaks=1, n_processes=5, storage_path=None,
               extract_only_tandem_envelopes=False, ms1_background_reduction=5.,
               msn_background_reduction=0, ms1_averaging=0, channel=None):

    minimum_charge = 1 if maximum_charge > 0 else -1
    charge_range = (minimum_charge, maximum_charge)
    logger.info("Begin Scan Interpolation")
    loader: RandomAccessScanSource = MSFileLoader(mzml_file)
    if len(loader) == 0:
        channel.abort("Cannot process an empty MS data file")
    start_scan = loader.get_scan_by_time(start_time)
    if start_scan is None:
        start_scan = loader[0]

    # Only an explicit False (not a None / "unknown" result) disables
    # tandem envelope extraction.
    if loader.has_ms1_scans() == False:
        extract_only_tandem_envelopes = False

    try:
        start_scan_id = loader._locate_ms1_scan(start_scan).id
    except IndexError:
        start_scan_id = start_scan.id

    end_scan = loader.get_scan_by_time(end_time)
    if end_scan is None:
        end_scan = loader[-1]
    try:
        end_scan_id = loader._locate_ms1_scan(end_scan).id
    except IndexError:
        end_scan_id = end_scan.id

    loader.reset()
    loader.make_iterator(grouped=True)

    first_batch = next(loader)
    if first_batch.precursor is not None:
        is_profile = first_batch.precursor.is_profile
    elif first_batch.products:
        is_profile = first_batch.products[0].is_profile
    else:
        # Assumption: treat an empty first batch as centroided data.
        is_profile = False
    if is_profile:
        logger.info("Spectra are profile")
    else:
        logger.info("Spectra are centroided")

    logger.info("Resolving Sample Name")
    if name is None:
        name = os.path.splitext(os.path.basename(mzml_file))[0]

    name = validate_sample_run_name(None, database_connection, name)

    logger.info("Validating arguments")
    try:
        averagine = validate_averagine(averagine)
    except Exception:
        channel.abort("Could not validate MS1 Averagine %s" % averagine)

    try:
        msn_averagine = validate_averagine(msn_averagine)
    except Exception:
        channel.abort("Could not validate MSn Averagine %s" % msn_averagine)

    if is_profile:
        ms1_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=ms1_background_reduction, window_length=2.),
                ms_peak_picker.scan_filter.SavitskyGolayFilter()
            ],
            'signal_to_noise_threshold': 1.0,
        }
        if ms1_background_reduction == 0:
            ms1_peak_picking_args['transforms'] = []
    else:
        ms1_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=ms1_background_reduction, window_length=2.),
            ]
        }
        if ms1_background_reduction == 0:
            ms1_peak_picking_args['transforms'] = []

    if msn_background_reduction > 0:
        msn_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=msn_background_reduction, window_length=2.),
            ]
        }
    else:
        msn_peak_picking_args = {'transforms': []}

    ms1_deconvolution_args = {
        "scorer": ms_deisotope.scoring.PenalizedMSDeconVFitter(score_threshold, 2.),
        "averagine": averagine,
        "charge_range": charge_range,
        "max_missed_peaks": missed_peaks,
        "truncate_after": SampleConsumer.MS1_ISOTOPIC_PATTERN_WIDTH,
        "ignore_below": SampleConsumer.MS1_IGNORE_BELOW
    }

    msn_deconvolution_args = {
        "scorer": ms_deisotope.scoring.MSDeconVFitter(msn_score_threshold),
        "averagine": msn_averagine,
        "charge_range": charge_range,
        "max_missed_peaks": msn_missed_peaks,
        "truncate_after": SampleConsumer.MSN_ISOTOPIC_PATTERN_WIDTH,
        "ignore_below": SampleConsumer.MSN_IGNORE_BELOW
    }

    consumer = SampleConsumer(
        mzml_file,
        ms1_peak_picking_args=ms1_peak_picking_args,
        ms1_deconvolution_args=ms1_deconvolution_args,
        msn_peak_picking_args=msn_peak_picking_args,
        msn_deconvolution_args=msn_deconvolution_args,
        storage_path=storage_path,
        sample_name=name,
        start_scan_id=start_scan_id,
        end_scan_id=end_scan_id,
        n_processes=n_processes,
        extract_only_tandem_envelopes=extract_only_tandem_envelopes,
        ms1_averaging=ms1_averaging,
        cache_handler_type=ThreadedMzMLScanCacheHandler)

    try:
        consumer.start()
        logger.info("Updating New Sample Run")
        reader = ProcessedMzMLDeserializer(storage_path, use_index=False)
        reader.read_index_file()
        sample_run_data = reader.sample_run
        if reader.extended_index.msn_ids:
            sample_type = "MS/MS Sample"
        else:
            sample_type = "MS Sample"
        sample_run = sample.SampleRunRecord(
            name=sample_run_data.name,
            uuid=sample_run_data.uuid,
            completed=True,
            path=storage_path,
            sample_type=sample_type,
            user_id=channel.user.id)
        channel.send(Message(sample_run.to_json(), "new-sample-run"))
    except Exception:
        channel.send(Message.traceback())
        channel.abort("An error occurred during preprocessing.")
Example #7
def preprocess(ms_file,
               outfile_path,
               averagine=None,
               start_time=None,
               end_time=None,
               maximum_charge=None,
               name=None,
               msn_averagine=None,
               score_threshold=35.,
               msn_score_threshold=10.,
               missed_peaks=1,
               msn_missed_peaks=1,
               background_reduction=5.,
               msn_background_reduction=0.,
               transform=None,
               msn_transform=None,
               processes=4,
               extract_only_tandem_envelopes=False,
               ignore_msn=False,
               profile=False,
               isotopic_strictness=2.0,
               ms1_averaging=0,
               msn_isotopic_strictness=0.0,
               signal_to_noise_threshold=1.0,
               mass_offset=0.0,
               deconvolute=True):
    '''Convert raw mass spectra data into deisotoped neutral mass peak lists written to mzML.
    '''
    if transform is None:
        transform = []
    if msn_transform is None:
        msn_transform = []

    if (ignore_msn and extract_only_tandem_envelopes):
        click.secho(
            "Cannot use both --ignore-msn and --extract-only-tandem-envelopes",
            fg='red')
        raise click.Abort(
            "Cannot use both --ignore-msn and --extract-only-tandem-envelopes")

    cache_handler_type = ThreadedMzMLScanCacheHandler
    click.echo("Preprocessing %s" % ms_file)
    minimum_charge = 1 if maximum_charge > 0 else -1
    charge_range = (minimum_charge, maximum_charge)

    loader = MSFileLoader(ms_file)
    if isinstance(loader, RandomAccessScanSource):
        last_scan = loader[len(loader) - 1]
        last_time = last_scan.scan_time
        start_scan = loader._locate_ms1_scan(
            loader.get_scan_by_time(start_time))
        if end_time > last_time:
            end_time = last_time
        end_scan = loader._locate_ms1_scan(loader.get_scan_by_time(end_time))

        start_scan_id = start_scan.id
        end_scan_id = end_scan.id

        start_scan_time = start_scan.scan_time
        end_scan_time = end_scan.scan_time

        loader.reset()
        loader.start_from_scan(start_scan_id, grouped=True)
    else:
        click.secho(
            "The file format provided does not support random"
            " access, start and end points will be ignored",
            fg='yellow')
        start_scan_time = 0
        start_scan_id = None

        end_scan_time = float('inf')
        end_scan_id = None
        loader.make_iterator(grouped=True)

    first_bunch = next(loader)
    if first_bunch.precursor is not None:
        is_profile = (first_bunch.precursor.is_profile or profile)
    elif first_bunch.products:
        is_profile = (first_bunch.products[0].is_profile or profile)
    else:
        # Fall back to the explicit profile flag when the first bunch has no scans.
        is_profile = profile

    if is_profile:
        click.secho("Spectra are profile")
    else:
        click.secho("Spectra are centroided", fg='yellow')

    if name is None:
        name = os.path.splitext(os.path.basename(ms_file))[0]

    if os.path.exists(outfile_path) and not os.access(outfile_path, os.W_OK):
        click.secho("Can't write to output file path", fg='red')
        raise click.Abort()

    click.secho("Initializing %s" % name, fg='green')
    click.echo("from %s (%0.2f) to %s (%0.2f)" %
               (start_scan_id, start_scan_time, end_scan_id, end_scan_time))
    if deconvolute:
        click.echo("charge range: %s" % (charge_range, ))

    if is_profile:
        ms1_peak_picking_args = {
            "transforms": [] + list(transform),
            "signal_to_noise_threshold": signal_to_noise_threshold
        }
        if background_reduction:
            ms1_peak_picking_args['transforms'].append(
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=background_reduction, window_length=2))
            ms1_peak_picking_args['transforms'].append(
                ms_peak_picker.scan_filter.SavitskyGolayFilter())
    else:
        ms1_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=background_reduction, window_length=2),
            ] + list(transform)
        }

    if msn_background_reduction > 0.0:
        msn_peak_picking_args = {
            "transforms": [
                ms_peak_picker.scan_filter.FTICRBaselineRemoval(
                    scale=msn_background_reduction, window_length=2),
            ] + list(msn_transform)
        }
    else:
        msn_peak_picking_args = {"transforms": [] + list(msn_transform)}

    if mass_offset != 0.0:
        ms1_peak_picking_args['transforms'].append(
            ms_peak_picker.scan_filter.RecalibrateMass(offset=mass_offset))
        msn_peak_picking_args['transforms'].append(
            ms_peak_picker.scan_filter.RecalibrateMass(offset=mass_offset))

    if deconvolute:
        if len(averagine) == 1:
            averagine = averagine[0]
            ms1_deconvoluter_type = ms_deisotope.deconvolution.AveraginePeakDependenceGraphDeconvoluter
        else:
            ms1_deconvoluter_type = ms_deisotope.deconvolution.MultiAveraginePeakDependenceGraphDeconvoluter

        ms1_deconvolution_args = {
            "scorer": ms_deisotope.scoring.PenalizedMSDeconVFitter(
                score_threshold, isotopic_strictness),
            "max_missed_peaks": missed_peaks,
            "averagine": averagine,
            "truncate_after": SampleConsumer.MS1_ISOTOPIC_PATTERN_WIDTH,
            "ignore_below": SampleConsumer.MS1_IGNORE_BELOW,
            "deconvoluter_type": ms1_deconvoluter_type
        }

        if msn_isotopic_strictness >= 1:
            msn_isotopic_scorer = ms_deisotope.scoring.PenalizedMSDeconVFitter(
                msn_score_threshold, msn_isotopic_strictness)
        else:
            msn_isotopic_scorer = ms_deisotope.scoring.MSDeconVFitter(
                msn_score_threshold)

        msn_deconvolution_args = {
            "scorer": msn_isotopic_scorer,
            "averagine": msn_averagine,
            "max_missed_peaks": msn_missed_peaks,
            "truncate_after": SampleConsumer.MSN_ISOTOPIC_PATTERN_WIDTH,
            "ignore_below": SampleConsumer.MSN_IGNORE_BELOW
        }
    else:
        ms1_deconvolution_args = None
        msn_deconvolution_args = None

    consumer = SampleConsumer(
        ms_file,
        ms1_peak_picking_args=ms1_peak_picking_args,
        ms1_deconvolution_args=ms1_deconvolution_args,
        msn_peak_picking_args=msn_peak_picking_args,
        msn_deconvolution_args=msn_deconvolution_args,
        storage_path=outfile_path,
        sample_name=name,
        start_scan_id=start_scan_id,
        cache_handler_type=cache_handler_type,
        end_scan_id=end_scan_id,
        n_processes=processes,
        extract_only_tandem_envelopes=extract_only_tandem_envelopes,
        ignore_tandem_scans=ignore_msn,
        ms1_averaging=ms1_averaging,
        deconvolute=deconvolute)
    consumer.display_header()
    consumer.start()
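Once a run has been preprocessed, the deisotoped mzML it produces can be read back with the same ProcessedMzMLDeserializer that Example #6 uses; a minimal read-back sketch (import path and file name assumed):

from ms_deisotope.output.mzml import ProcessedMzMLDeserializer

reader = ProcessedMzMLDeserializer("example_deisotoped.mzML")
bunch = next(reader)                       # grouped precursor + products
precursor = bunch.precursor
print(precursor.id, len(precursor.deconvoluted_peak_set))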