Example #1
0
def invariant_compute(invariants, graph_fn, invariants_path, data_dir,
                      in_graph_format, to_email):
    """
    Compute the requested invariants for a single graph and email the user
    on failure.

    @param invariants: iterable of invariant names to compute
    @param graph_fn: filename of the input graph
    @param invariants_path: directory where invariant results are written
    @param data_dir: project data directory used to build the download URL
    @param in_graph_format: input graph format passed to run_invariants
    @param to_email: address to notify if the job fails
    """
    dwnld_loc = get_download_path(data_dir)

    try:
        invariant_fns = run_invariants(invariants,
                                       graph_fn,
                                       invariants_path,
                                       graph_format=in_graph_format)

        # run_invariants signals failure by returning an error string
        # instead of the expected result; surface that string in the
        # failure email rather than raising an empty Exception.
        if isinstance(invariant_fns, str):
            raise Exception(invariant_fns)

        print('Invariants for anonymous project %s complete...' % graph_fn)

    except Exception as msg:
        # The '{}' placeholder is presumably filled in with the download
        # location by sendJobFailureEmail — confirm against its definition.
        msg = """
Hello,\n\nYour most recent job for '%s' failed possibly because:\n- '%s'.
\n\nYou may have some partially completed data at {}.
\nPlease check these and try again.\n\n
""" % (os.path.basename(graph_fn), msg)

        sendJobFailureEmail(to_email, msg, dwnld_loc)
        return
Example #2
0
def convert(media_root, uploadedFiles, convert_file_save_loc, input_format, output_format, to_email):
  """
  Convert each uploaded graph file to the requested output format(s),
  emailing the user when the job launches and when it finishes.

  @param media_root: media root directory (unused here; kept for callers)
  @param uploadedFiles: filenames of the graphs to convert
  @param convert_file_save_loc: directory where converted files are saved
  @param input_format: format of the uploaded graphs
  @param output_format: iterable of target format names
  @param to_email: address for job status notifications
  """
  # Send begin-job email listing the input files and target formats.
  content = "Hello,\n\n You requested the following files be converted:"
  for fn in uploadedFiles:
    content += "\n- " + os.path.basename(fn)
  content += "\n\nTo the following formats:"
  for fmt in output_format:
    content += "\n- " + fmt

  sendEmail(to_email, "Job launch Notification", content+"\n\n")
  # End Email junk

  # Collect errors from *every* file. Previously err_msg was overwritten
  # on each iteration, so earlier failures were lost and a later success
  # could mask an earlier error entirely.
  err_msgs = []
  outfn = ""
  for fn in uploadedFiles:
    outfn, err = convertTo.convert_graph(fn, input_format, convert_file_save_loc, *output_format)
    if err:
      err_msgs.append(err)

  dwnld_loc = get_download_path(convert_file_save_loc)
  print("Download path: {0}".format(dwnld_loc))

  if err_msgs:
    # The '{}' placeholder is presumably filled by sendJobFailureEmail.
    err_msg = "Your job completed with errors. The result can be found at {}.\n\n"\
        "Message: %s\n\n" % "\n".join(err_msgs)
    sendJobFailureEmail(to_email, err_msg, dwnld_loc)
  else:
    sendJobCompleteEmail(to_email, dwnld_loc)
Example #3
0
def invariant_compute(invariants, graph_fn, invariants_path, data_dir, in_graph_format, to_email):
  """
  if isinstance(session, str) or isinstance(session, unicode):
    f = open(session, "rb")
    session = pickle.load(f)
    f.close()
  """
  dwnld_loc = get_download_path(data_dir)

  try:
    invariant_fns = run_invariants(invariants, graph_fn,
                invariants_path, 
                graph_format=in_graph_format)

    if isinstance(invariant_fns, str):
      raise Exception
    else:
      print 'Invariants for annoymous project %s complete...' % graph_fn

  except Exception, msg:
    msg = """
Hello,\n\nYour most recent job for '%s' failed possibly because:\n- '%s'.
\n\n"You may have some partially completed data at {}.
\nPlease check these and try again.\n\n
""" % (os.path.basename(graph_fn), msg)

    sendJobFailureEmail(to_email, msg, dwnld_loc)
    return
Example #4
0
def scale_convert(selected_files, dl_format, ds_factor, ATLASES, email=None, dwnld_loc=None, zip_fn=None):
    # Convert each selected graph file to `dl_format` (optionally
    # downsampling big human graphs by `ds_factor`), zip the results into
    # `zip_fn`, and delete the temp files. On error, email the user if an
    # address was supplied, otherwise return an error string.
    # Debug
    print "Entering scale function ..."
    try:
        # Fast path: graphml output with no downsampling needs no
        # per-file conversion — zip the originals directly.
        if dl_format == "graphml" and ds_factor == 0:
            temp = zipfiles(selected_files, use_genus=True, zip_out_fn=zip_fn)

        else:
            files_to_zip = {}  # maps source filename -> converted temp file

            for fn in selected_files:
                # No matter what we need a temp file
                print "Creating temp file ..."
                tmpfile = tempfile.NamedTemporaryFile("w", delete=False, dir="/data/pytmp")
                print "Temp file %s created ..." % tmpfile.name
                tmpfile.close()

                # Downsample only valid for *BIG* human brains!
                # *NOTE: If smallgraphs are ingested this will break

                if ds_factor and get_genus(fn) == "human":
                    if isinstance(ds_factor, int):
                        # Integer factor: plain factor-based downsampling.
                        print "downsampling to factor %d" % ds_factor
                        g = downsample(igraph_io.read_arbitrary(fn, "graphml"), ds_factor)
                        print "downsample complete"
                    else:
                        # Non-integer ds_factor is used as an ATLASES key —
                        # presumably mapping to a NIfTI atlas path; verify
                        # against callers.
                        g = downsample(igraph_io.read_arbitrary(fn, "graphml"), atlas=nib_load(ATLASES[ds_factor]))
                else:
                    g = igraph_io.read_arbitrary(fn, "graphml")

                # Write to `some` format
                if dl_format == "mm":
                    igraph_io.write_mm(g, tmpfile.name)
                else:
                    g.write(tmpfile.name, format=dl_format)

                files_to_zip[fn] = tmpfile.name

            temp = zipfiles(files_to_zip, use_genus=True, zip_out_fn=zip_fn, gformat=dl_format)
            # Del temp files
            for tmpfn in files_to_zip.values():
                print "Deleting %s ..." % tmpfn
                os.remove(tmpfn)

    except Exception, msg:
        print "An exception was thrown and caught with message %s!" % msg
        if email:
            # '{}' is presumably filled with the download location by
            # sendJobFailureEmail — confirm against its definition.
            msg = """
Hello,\n\nYour most recent job failed to complete.
\nYou may have some partially completed data at {}.\n\n
"""
            sendJobFailureEmail(email, msg, dwnld_loc)
            return
        else:
            # NOTE(review): the return string below is truncated in this
            # snippet (unterminated line continuation) — the source was cut
            # off here; recover the full statement from the original file.
            return 'An error occurred while processing your request. Please send an email to \
Example #5
0
def process_input_data(derivatives, graph_loc, graphsize, invariants, proj_dir,
                       to_email):
    """
    Sort the uploaded derivative files, generate a graph from them, compute
    any requested invariants, and email the user the outcome.

    @param derivatives: directory holding the uploaded derivative files
    @param graph_loc: directory where the generated graph is written
    @param graphsize: "small"/"big" selector (any string starting with
        's' or 'b', case-insensitive)
    @param invariants: invariant names to compute after graph generation
    @param proj_dir: project directory used to build the download URL
    @param to_email: address for job status notifications
    @return: 911 on graph-generation failure, otherwise None
    """
    filesInUploadDir = os.listdir(derivatives)

    # Classify the uploads: which file is fiber data, which the atlas.
    fiber_fn, data_atlas_fn = checkFileExtGengraph(filesInUploadDir)

    # Fully qualify file names.
    fiber_fn = os.path.join(derivatives, fiber_fn)

    if not data_atlas_fn:
        # No atlas uploaded: fall back to the first configured default.
        data_atlas_fn = settings.ATLASES.keys()[0]
    else:
        data_atlas_fn = os.path.join(derivatives, data_atlas_fn)

    print("data_atlas_fn %s ..." % data_atlas_fn)

    Gfn = os.path.join(graph_loc, getFiberID(fiber_fn))  # partial name
    if graphsize.lower().startswith("s"):
        Gfn += "smgr.graphml"
        graphsize = False  # False selects the small-graph pipeline
    elif graphsize.lower().startswith("b"):
        Gfn += "bggr.graphml"
        graphsize = True  # True selects the big-graph pipeline
    else:
        print('[ERROR]: Graphsize Unknown')  # should never happen

    try:
        genGraph(fiber_fn, data_atlas_fn, Gfn, graphsize,
                 **settings.ATLASES)  # FIXME: numfibers = 20000 for tests
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate instead of being reported as a format error.
        msg = "Hello,\n\nYour most recent job failed either because your fiber streamline file or ROI mask was incorrectly formatted."
        msg += " Please check both and try again.\n\n"
        sendJobFailureEmail(to_email, msg)
        return 911

    # Run invariants here
    if len(invariants) > 0:
        print("Computing invariants {0}".format(invariants))

        invariant_fns = run_invariants(invariants, Gfn, graph_loc)

    dwnld_loc = get_download_path(proj_dir)
    sendJobCompleteEmail(to_email, dwnld_loc)
Example #6
0
def process_input_data(derivatives, graph_loc, graphsize, invariants, 
                        proj_dir, to_email):
  """
  Sort the uploaded derivative files, generate a graph from them, compute
  any requested invariants, and email the user the outcome.

  @param derivatives: directory holding the uploaded derivative files
  @param graph_loc: directory where the generated graph is written
  @param graphsize: "small"/"big" selector (string starting with 's' or 'b')
  @param invariants: invariant names to compute after graph generation
  @param proj_dir: project directory used to build the download URL
  @param to_email: address for job status notifications
  @return: 911 on graph-generation failure, otherwise None
  """
  filesInUploadDir = os.listdir(derivatives)

  # Classify uploads: which file is fiber data, which is the atlas.
  fiber_fn, data_atlas_fn = checkFileExtGengraph(filesInUploadDir)

  # Fully qualify file names.
  fiber_fn = os.path.join(derivatives, fiber_fn)

  if not data_atlas_fn:
    # No atlas uploaded: fall back to the first configured default.
    data_atlas_fn = settings.ATLASES.keys()[0]
  else:
    data_atlas_fn = os.path.join(derivatives, data_atlas_fn)

  print("data_atlas_fn %s ..." % data_atlas_fn)

  Gfn = os.path.join(graph_loc, getFiberID(fiber_fn)) # partial name
  if graphsize.lower().startswith("s"):
    Gfn += "smgr.graphml"
    graphsize = False # False selects the small-graph pipeline
  elif graphsize.lower().startswith("b"):
    Gfn += "bggr.graphml"
    graphsize = True # True selects the big-graph pipeline
  else:
    print('[ERROR]: Graphsize Unknown') # should never happen

  try:
    genGraph(fiber_fn, data_atlas_fn, Gfn, graphsize, **settings.ATLASES) # FIXME: numfibers = 20000 for tests
  except Exception:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still
    # propagate instead of being reported as a format error.
    msg = "Hello,\n\nYour most recent job failed either because your fiber streamline file or ROI mask was incorrectly formatted."
    msg += " Please check both and try again.\n\n"
    sendJobFailureEmail(to_email, msg)
    return 911

  # Run invariants here
  if len(invariants) > 0:
    print("Computing invariants {0}".format(invariants))

    invariant_fns = run_invariants(invariants, Gfn, graph_loc)

  dwnld_loc = get_download_path(proj_dir)
  sendJobCompleteEmail(to_email, dwnld_loc)
Example #7
0
def scale_convert(selected_files,
                  dl_format,
                  ds_factor,
                  ATLASES,
                  email=None,
                  dwnld_loc=None,
                  zip_fn=None):
    # Debug
    print "Entering scale function ..."
    try:
        if dl_format == "graphml" and ds_factor == 0:
            temp = zipfiles(selected_files, use_genus=True, zip_out_fn=zip_fn)

        else:
            files_to_zip = {}

            for fn in selected_files:
                # No matter what we need a temp file
                print "Creating temp file ..."
                tmpfile = tempfile.NamedTemporaryFile("w",
                                                      delete=False,
                                                      dir="/data/pytmp")
                print "Temp file %s created ..." % tmpfile.name
                tmpfile.close()

                # Downsample only valid for *BIG* human brains!
                # *NOTE: If smallgraphs are ingested this will break

                if ds_factor and get_genus(fn) == "human":
                    if isinstance(ds_factor, int):
                        print "downsampling to factor %d" % ds_factor
                        g = downsample(igraph_io.read_arbitrary(fn, "graphml"),
                                       ds_factor)
                        print "downsample complete"
                    else:
                        g = downsample(igraph_io.read_arbitrary(fn, "graphml"),
                                       atlas=nib_load(ATLASES[ds_factor]))
                else:
                    g = igraph_io.read_arbitrary(fn, "graphml")

                # Write to `some` format
                if dl_format == "mm":
                    igraph_io.write_mm(g, tmpfile.name)
                else:
                    g.write(tmpfile.name, format=dl_format)

                files_to_zip[fn] = tmpfile.name

            temp = zipfiles(files_to_zip,
                            use_genus=True,
                            zip_out_fn=zip_fn,
                            gformat=dl_format)
            # Del temp files
            for tmpfn in files_to_zip.values():
                print "Deleting %s ..." % tmpfn
                os.remove(tmpfn)

    except Exception, msg:
        print "An exception was thrown and caught with message %s!" % msg
        if email:
            msg = """
Hello,\n\nYour most recent job failed to complete.
\nYou may have some partially completed data at {}.\n\n
"""
            sendJobFailureEmail(email, msg, dwnld_loc)
            return
        else:
            return 'An error occurred while processing your request. Please send an email to \