Example #1
import pickle

# Select the requested operation from the parsed command-line arguments.
op = args.operator

if op == "vb":
    flow = flow.visualizeBrain()

elif op == "v":
    nsamples = args.nsamples
    flow = flow.visualize(nsamples=int(nsamples))

elif op == "vc":
    k = args.k
    flow = flow.clustering(int(k)).visualize()

elif op == "ts":
    k = args.k
    flow = flow.clustering(int(k))
    flow.execute()
    with open("model", "a+") as output:
        pickle.dump(flow.last.result, output, pickle.HIGHEST_PROTOCOL)
        exit("Model Saved")

elif op == "pr":
    utils.predict(args.model, args.vector)
    exit()

else:
    exit("Operator not found")

flow.execute()
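
The dispatch above reads its options from an `args` namespace that is defined elsewhere in the script. As a rough sketch, an argparse setup supplying the attributes it uses (operator, nsamples, k, model, vector) could look like the following; the flag strings, defaults and help texts are assumptions, not taken from the project.

import argparse

# Hypothetical argument parsing: only the attribute names come from the
# snippet above; the flags, defaults and help strings are illustrative.
parser = argparse.ArgumentParser(description="fMRI workflow runner")
parser.add_argument("operator", help="one of: vb, v, vc, ts, pr")
parser.add_argument("--nsamples", default="100", help="samples to visualize (op 'v')")
parser.add_argument("--k", default="12", help="number of clusters (ops 'vc', 'ts')")
parser.add_argument("--model", help="path to a pickled model (op 'pr')")
parser.add_argument("--vector", help="vector to classify (op 'pr')")
args = parser.parse_args()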
Example #2
import os.path as pth
import thunder
import utils

from workflow import Workflow

from pyspark import SparkContext

sc = SparkContext()

# Alternative: the example fish images bundled with thunder.
#data = pth.join(pth.dirname(pth.realpath(thunder.__file__)), 'utils/data/fish/images')
# Load a 4-D fMRI NIfTI volume and keep only the first 100 time points.
data = utils.readNifti("/home/vic/Dev/fMRI/bold_dico.nii")[:, :, :, :100]


# Chain the pipeline: extract signals, visualize them, cluster into 12
# groups, visualize the clusters, then render the brain view.
flow1 = Workflow(data, sc)\
      .extract()\
      .visualize()\
      .clustering(12)\
      .visualize()\
      .visualizeBrain()

print "\n=====PLAN====\n" \
      "%s" \
      "=====PLAN=====\n"%flow1.explain()

flow1.execute()
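
Both examples lean on the chained, lazy style of the project's Workflow class: calls such as extract() or clustering(12) appear to only record steps, and nothing runs until execute(), which is why explain() can print the plan first. The Workflow internals are not shown here, so the sketch below is only a simplified illustration of that deferred-execution pattern, not the actual implementation.

class LazyFlow(object):
    """Hypothetical sketch of a deferred-execution pipeline."""

    def __init__(self, data):
        self.data = data
        self.steps = []   # (name, function) pairs, applied in order
        self.last = None  # result of the final step after execute()

    def add(self, name, fn):
        # Record a step and return self so calls can be chained.
        self.steps.append((name, fn))
        return self

    def explain(self):
        # Describe the recorded plan without running anything.
        return "\n".join(name for name, _ in self.steps) + "\n"

    def execute(self):
        # Apply the recorded steps in order and keep the final result.
        result = self.data
        for _, fn in self.steps:
            result = fn(result)
        self.last = result
        return result

# Usage: LazyFlow(data).add("extract", extract_fn).add("cluster", cluster_fn)
#        then .explain() to inspect the plan and .execute() to run it.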