Example #1
def main():
    settings = load_settings()

    targets = [
        'Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2'
    ]

    # The genetic algorithm will be run individually on each pipeline group
    pipeline_groups = [
        ([
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
        ], 0.55),
        ([
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ], 0.55),
        ([
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy(
                    [0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5,
                     24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
        ], 0.55),
    ]

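    # Passing 'submission' as the first command-line argument builds the final
    # submission; otherwise the genetic algorithm feature search is run.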
    make_submission = len(sys.argv) >= 2 and sys.argv[1] == 'submission'
    run_ga = not make_submission

    # This classifier is used in the genetic algorithm
    ga_classifier, ga_classifier_name = make_svm(gamma=0.0079, C=2.7)

    if run_ga:
        quiet = False
        summaries = []
        for ngen in [10]:
            for pipelines, ratio in pipeline_groups:
                out = []
                for target in targets:
                    print('Running target', target)
                    run_prepare_data_for_cross_validation(settings, [target],
                                                          pipelines,
                                                          quiet=True)
                    pipeline = FeatureConcatPipeline(*pipelines)
                    score, best_N = process_target(settings,
                                                   target,
                                                   pipeline,
                                                   ga_classifier,
                                                   ga_classifier_name,
                                                   ratio=ratio,
                                                   ngen=ngen,
                                                   quiet=quiet)
                    print(target, score,
                          [np.sum(mask) for mask in best_N[0:10]])
                    out.append((target, score, pipeline, best_N))

            scores = np.array([score for _, score, _, _ in out])
            summary = get_score_summary(
                '%s ngen=%d' % (ga_classifier_name, ngen), scores,
                np.mean(scores), targets)
            summaries.append((summary, np.mean(scores)))
            print(summary)

        print_results(summaries)

    if make_submission:
        random_pipelines = [
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     FreqCorrelation(1, None, 'none')),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
        ]

        # These classifiers are used to make the final predictions
        final_classifiers = [
            # make_svm(gamma=0.0079, C=2.7),
            make_svm(gamma=0.0068, C=2.0),
            # make_svm(gamma=0.003, C=150.0),
            # make_lr(C=0.04),
            # make_simple_lr(),
        ]
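        # Assemble the per-target pipelines and feature masks (from the GA
        # pipeline groups plus the random pipelines) used for the submission run.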
        targets_and_pipelines = get_submission_targets_and_masks(
            settings, targets, ga_classifier, ga_classifier_name,
            pipeline_groups, random_pipelines)
        for classifier, classifier_name in final_classifiers:
            run_make_submission(settings, targets_and_pipelines, classifier,
                                classifier_name)
Example #2
def main():
    settings = load_settings()

    pipelines = [
        FeatureConcatPipeline(
            Pipeline(InputSource(), Preprocess(), Windower(75), Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75), FreqCorrelation(1, None, 'none')),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ),
    ]

    targets = [
        'Dog_1',
        'Dog_2',
        'Dog_3',
        'Dog_4',
        'Dog_5',
        'Patient_1',
        'Patient_2'
    ]

    classifiers = [
        make_svm(gamma=0.0079, C=2.7),
        make_svm(gamma=0.0068, C=2.0),
        make_svm(gamma=0.003, C=150.0),
        make_lr(C=0.04),
        make_simple_lr(),
    ]


    make_submission = len(sys.argv) >= 2 and sys.argv[1] == 'submission'
    do_cv = not make_submission

    if do_cv:
        mask_range = [3]
        split_ratios = [0.4, 0.525, 0.6]
        run_prepare_data_for_cross_validation(settings, targets, pipelines)
        run_cross_validation(settings, targets, pipelines, mask_range, split_ratios, classifiers)

    if make_submission:
        num_masks = 10
        split_ratio = 0.525
        classifiers = [
            # make_svm(gamma=0.0079, C=2.7),
            make_svm(gamma=0.0068, C=2.0),
            # make_svm(gamma=0.003, C=150.0),
            # make_lr(C=0.04),
            # make_simple_lr(),
        ]

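        # For each final classifier, prepare the test data and generate
        # per-target feature masks before building the submission.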
        targets_and_pipelines = []
        pipeline = pipelines[0]
        for classifier, classifier_name in classifiers:
            for i, target in enumerate(targets):
                run_prepare_data(settings, [target], [pipeline], test=True)
                feature_masks = generate_feature_masks(settings, target, pipeline, num_masks, split_ratio, random_state=0, quiet=True)
                targets_and_pipelines.append((target, pipeline, feature_masks, classifier, classifier_name))

        run_make_submission(settings, targets_and_pipelines, split_ratio)
Example #3
def main():
    settings = load_settings()

    pipelines = [
        FeatureConcatPipeline(
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     FreqCorrelation(1, None, 'none')),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy(
                    [0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5,
                     24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ),
    ]

    targets = [
        'Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2'
    ]

    classifiers = [
        make_svm(gamma=0.0079, C=2.7),
        make_svm(gamma=0.0068, C=2.0),
        make_svm(gamma=0.003, C=150.0),
        make_lr(C=0.04),
        make_simple_lr(),
    ]

    make_submission = len(sys.argv) >= 2 and sys.argv[1] == 'submission'
    do_cv = not make_submission

    if do_cv:
        mask_range = [3]
        split_ratios = [0.4, 0.525, 0.6]
        run_prepare_data_for_cross_validation(settings, targets, pipelines)
        run_cross_validation(settings, targets, pipelines, mask_range,
                             split_ratios, classifiers)

    if make_submission:
        num_masks = 10
        split_ratio = 0.525
        classifiers = [
            # make_svm(gamma=0.0079, C=2.7),
            make_svm(gamma=0.0068, C=2.0),
            # make_svm(gamma=0.003, C=150.0),
            # make_lr(C=0.04),
            # make_simple_lr(),
        ]

        targets_and_pipelines = []
        pipeline = pipelines[0]
        for classifier, classifier_name in classifiers:
            for i, target in enumerate(targets):
                run_prepare_data(settings, [target], [pipeline], test=True)
                feature_masks = generate_feature_masks(settings,
                                                       target,
                                                       pipeline,
                                                       num_masks,
                                                       split_ratio,
                                                       random_state=0,
                                                       quiet=True)
                targets_and_pipelines.append((target, pipeline, feature_masks,
                                              classifier, classifier_name))

        run_make_submission(settings, targets_and_pipelines, split_ratio)
Example #4
targets = [
    'Dog_1',
    'Dog_2',
    'Dog_3',
    'Dog_4',
    'Dog_5',
    'Patient_1',
    'Patient_2'
]

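# Placeholder feature that collapses the last axis of each input to a single
# zero, keeping the pipeline cheap since only its metadata is used below.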
class Zero:
    def get_name(self):
        return 'zero'

    def apply(self, X, meta):
        return np.zeros(list(X.shape[:-1]) + [1])

settings = load_settings()
pipeline = Pipeline(InputSource(), Zero())

strategies = [
    LegacyStrategy(),
    KFoldStrategy(),
]

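# Compare how each cross-validation strategy assigns the preictal segments to folds.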
for strategy in strategies:
    print('Strategy', strategy.get_name())
    for target in targets:
        _, preictal_meta = load_pipeline_data(settings, target, 'preictal', pipeline, check_only=False, quiet=True, meta_only=True)
        # _, interictal_meta = load_pipeline_data(settings, target, 'interictal', pipeline, check_only=False, quiet=True, meta_only=True)
        fold_numbers = strategy.get_folds(preictal_meta)
        data = np.arange(0, preictal_meta.X_shape[0]).astype(int)
        sequence_ranges = collect_sequence_ranges(preictal_meta.sequence)
Example #5
def main():
    settings = load_settings()

    targets = [
        'Dog_1',
        'Dog_2',
        'Dog_3',
        'Dog_4',
        'Dog_5',
        'Patient_1',
        'Patient_2'
    ]

    # The genetic algorithm will be run individually on each pipeline group
    pipeline_groups = [
        ([
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
        ], 0.55),
        ([
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ], 0.55),
        ([
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
        ], 0.55),
    ]

    make_submission = len(sys.argv) >= 2 and sys.argv[1] == 'submission'
    run_ga = not make_submission

    # This classifier is used in the genetic algorithm
    ga_classifier, ga_classifier_name = make_svm(gamma=0.0079, C=2.7)

    if run_ga:
        quiet = False
        summaries = []
        for ngen in [10]:
            for pipelines, ratio in pipeline_groups:
                out = []
                for target in targets:
                    print('Running target', target)
                    run_prepare_data_for_cross_validation(settings, [target], pipelines, quiet=True)
                    pipeline = FeatureConcatPipeline(*pipelines)
                    score, best_N = process_target(settings, target, pipeline, ga_classifier, ga_classifier_name, ratio=ratio, ngen=ngen, quiet=quiet)
                    print(target, score, [np.sum(mask) for mask in best_N[0:10]])
                    out.append((target, score, pipeline, best_N))

            scores = np.array([score for _, score, _, _ in out])
            summary = get_score_summary('%s ngen=%d' % (ga_classifier_name, ngen), scores)
            summaries.append((summary, np.mean(scores)))
            print(summary)

        print_results(summaries)

    if make_submission:
        random_pipelines = [
            Pipeline(InputSource(), Preprocess(), Windower(75), Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75), FreqCorrelation(1, None, 'none')),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
        ]

        # These classifiers are used to make the final predictions
        final_classifiers = [
            # make_svm(gamma=0.0079, C=2.7),
            make_svm(gamma=0.0068, C=2.0),
            # make_svm(gamma=0.003, C=150.0),
            # make_lr(C=0.04),
            # make_simple_lr(),
        ]
        targets_and_pipelines = get_submission_targets_and_masks(settings, targets, ga_classifier, ga_classifier_name, pipeline_groups, random_pipelines)
        for classifier, classifier_name in final_classifiers:
            run_make_submission(settings, targets_and_pipelines, classifier, classifier_name)
Example #6
def main():

    settings = load_settings()

    targets = [
        'Dog_1', 'Dog_2', 'Dog_3', 'Dog_4', 'Dog_5', 'Patient_1', 'Patient_2'
    ]

    pipelines = [
        FeatureConcatPipeline(
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     FreqCorrelation(1, None, 'none')),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy(
                    [0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5,
                     24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ),
    ]

    classifiers = [
        make_svm(gamma=0.0079, C=2.7),
        make_svm(gamma=0.0068, C=2.0),
        make_svm(gamma=0.003, C=150.0),
        make_lr(C=0.04),
        make_simple_lr(),
    ]

    submission_pipelines = [
        FeatureConcatPipeline(
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75),
                     FreqCorrelation(1, None, 'none')),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy(
                    [0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5,
                     24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([6, 15, 24])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(
                InputSource(Preprocess(), Windower(75), FFT(), Magnitude()),
                PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ),
    ]

    submission_classifiers = [
        make_simple_lr(),
    ]

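    # 'submission' on the command line triggers the final submission run;
    # otherwise cross-validation is performed over all classifiers and pipelines.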
    if len(sys.argv) >= 2 and sys.argv[1] == 'submission':
        run_make_submission(settings, targets, submission_classifiers,
                            submission_pipelines)
    else:
        run_cross_validation(settings, targets, classifiers, pipelines)
Example #7
def main():

    settings = load_settings()

    targets = [
        'Dog_1',
        'Dog_2',
        'Dog_3',
        'Dog_4',
        'Dog_5',
        'Patient_1',
        'Patient_2'
    ]

    pipelines = [
        FeatureConcatPipeline(
            Pipeline(InputSource(), Preprocess(), Windower(75), Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75), FreqCorrelation(1, None, 'none')),

            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([3.5, 6, 15])),

            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ),
    ]

    classifiers = [
        make_svm(gamma=0.0079, C=2.7),
        make_svm(gamma=0.0068, C=2.0),
        make_svm(gamma=0.003, C=150.0),
        make_lr(C=0.04),
        make_simple_lr(),
    ]

    submission_pipelines = [
        FeatureConcatPipeline(
            Pipeline(InputSource(), Preprocess(), Windower(75), Correlation('none')),
            Pipeline(InputSource(), Preprocess(), Windower(75), FreqCorrelation(1, None, 'none')),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), FreqBinning(winning_bins, 'mean'), Log10(), FlattenChannels()),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 1, 1.75, 2.5, 3.25, 4, 5, 8.5, 12, 15.5, 19.5, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5, 6, 15])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([0.25, 2, 3.5])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([6, 15, 24])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([2, 3.5, 6])),
            Pipeline(InputSource(Preprocess(), Windower(75), FFT(), Magnitude()), PIBSpectralEntropy([3.5, 6, 15])),
            Pipeline(InputSource(), Preprocess(), Windower(75), HFD(2)),
            Pipeline(InputSource(), Preprocess(), Windower(75), PFD()),
            Pipeline(InputSource(), Preprocess(), Windower(75), Hurst()),
        ),
    ]

    submission_classifiers = [
        make_simple_lr(),
    ]

    if len(sys.argv) >= 2 and sys.argv[1] == 'submission':
        run_make_submission(settings, targets, submission_classifiers, submission_pipelines)
    else:
        run_cross_validation(settings, targets, classifiers, pipelines)
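
Note: each example defines main() but does not show how it is invoked. A minimal sketch, assuming the snippet lives in a script executed directly (the guard below is an assumption and not part of the original code):

if __name__ == '__main__':
    main()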