Example #1
def invariant_compute(invariants, graph_fn, invariants_path, data_dir,
                      in_graph_format, to_email):
    """
  if isinstance(session, str) or isinstance(session, unicode):
    f = open(session, "rb")
    session = pickle.load(f)
    f.close()
  """
    dwnld_loc = get_download_path(data_dir)

    try:
        invariant_fns = run_invariants(invariants,
                                       graph_fn,
                                       invariants_path,
                                       graph_format=in_graph_format)

        if isinstance(invariant_fns, str):
            # A string return from run_invariants signals an error; raise it
            # so the message reaches the failure email below.
            raise Exception(invariant_fns)
        else:
            print 'Invariants for anonymous project %s complete...' % graph_fn

    except Exception, msg:
        # The '{}' placeholder is left unformatted here; sendJobFailureEmail
        # receives dwnld_loc and presumably fills it in.
        msg = """
Hello,

Your most recent job for '%s' failed, possibly because:
- '%s'.

You may have some partially completed data at {}.
Please check these and try again.

""" % (os.path.basename(graph_fn), msg)

        sendJobFailureEmail(to_email, msg, dwnld_loc)
        return
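
For context, a minimal invocation sketch of the function above; every argument value here is hypothetical:

invariant_compute(["degree", "cc"],              # hypothetical invariant names
                  "/data/graphs/subj1.graphml",  # graph_fn
                  "/data/invariants",            # invariants_path
                  "/data",                       # data_dir
                  "graphml",                     # in_graph_format
                  "user@example.com")            # to_email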
Example #2
def convert(media_root, uploadedFiles, convert_file_save_loc, input_format, output_format, to_email):
  # Send begin job email
  content = "Hello,\n\n You requested the following files be converted:"
  for fn in uploadedFiles:
    content += "\n- " + os.path.basename(fn)
  content += "\n\nTo the following formats:"
  for fmt in output_format:
    content += "\n- " + fmt

  sendEmail(to_email, "Job launch notification", content + "\n\n")
  # End of notification email

  err_msg = ""
  outfn = ""
  for fn in uploadedFiles:
    outfn, err_msg = convertTo.convert_graph(fn, input_format, convert_file_save_loc, *output_format)

  dwnld_loc = get_download_path(convert_file_save_loc)
  print "Download path: {0}".format(dwnld_loc)

  if err_msg:
    # The '{}' placeholder is left unformatted here; sendJobFailureEmail
    # receives dwnld_loc and presumably fills it in.
    err_msg = "Your job completed with errors. The result can be found at {}.\n\n"\
        "Message: %s\n\n" % err_msg
    sendJobFailureEmail(to_email, err_msg, dwnld_loc)
  else:
    sendJobCompleteEmail(to_email, dwnld_loc)
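
A hedged usage sketch of convert; the paths, formats, and address are hypothetical:

convert("/srv/media",                                # media_root
        ["/tmp/up/a.graphml", "/tmp/up/b.graphml"],  # uploadedFiles
        "/srv/media/public/job123",                  # convert_file_save_loc
        "graphml",                                   # input_format
        ["ncol", "edgelist"],                        # output_format (one or more)
        "user@example.com")                          # to_email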
Example #3
def invariant_compute(invariants, graph_fn, invariants_path, data_dir, in_graph_format, to_email):
  """
  if isinstance(session, str) or isinstance(session, unicode):
    f = open(session, "rb")
    session = pickle.load(f)
    f.close()
  """
  dwnld_loc = get_download_path(data_dir)

  try:
    invariant_fns = run_invariants(invariants, graph_fn,
                                   invariants_path,
                                   graph_format=in_graph_format)

    if isinstance(invariant_fns, str):
      # A string return from run_invariants signals an error; raise it so the
      # message reaches the failure email below.
      raise Exception(invariant_fns)
    else:
      print 'Invariants for anonymous project %s complete...' % graph_fn

  except Exception, msg:
    # The '{}' placeholder is left unformatted here; sendJobFailureEmail
    # receives dwnld_loc and presumably fills it in.
    msg = """
Hello,

Your most recent job for '%s' failed, possibly because:
- '%s'.

You may have some partially completed data at {}.
Please check these and try again.

""" % (os.path.basename(graph_fn), msg)

    sendJobFailureEmail(to_email, msg, dwnld_loc)
    return
Example #4
def _ingest_files(fns, genus, tb_name):

  print "Connecting to database %s ..." % db_args["default"]["NAME"]
  db = MySQLdb.connect(host=db_args["default"]["HOST"], user=db_args["default"]["USER"], 
     passwd=db_args["default"]["PASSWORD"], db=db_args["default"]["NAME"])
  db.autocommit(True)

  with closing(db.cursor()) as cursor:
    cursor.connection.autocommit(True)

    for graph_fn in fns:
      print "Processing %s ..." % graph_fn
      mtime = os.stat(graph_fn).st_mtime # get modification time
      g_changed = True
      # Check whether the graph is already in the DB and, if so, whether it
      # has been modified since ingest. NOTE: file paths are interpolated
      # directly into the SQL, so this assumes trusted input.
      test_qry = "select g.mtime from %s.%s as g where g.filepath = \"%s\";" % (db_args["default"]["NAME"], tb_name, graph_fn)

      if cursor.execute(test_qry): # Means the graph is already in the DB
        if cursor.fetchall()[0][0] == mtime: # Graph hasn't changed since ingest
          g_changed = False
          print "Ignoring %s ..." % graph_fn
        else:
          cursor.execute("delete from %s.%s where filepath = \"%s\";" % (db_args["default"]["NAME"], tb_name, graph_fn))
          print "  ===> Updating %s ..." % graph_fn

      if g_changed: # Means graph has changed since ingest OR was never in DB to start with
        # Collect all the attributes etc ..
        g = igraph_io.read_arbitrary(graph_fn, informat="graphml", headers_only=True)

        vertex_attrs = g.vs.attribute_names()
        edge_attrs = g.es.attribute_names()
        graph_attrs = g.attributes()
        vcount = g.vcount()
        ecount = g.ecount()
        # Give some default values if none exist
        if "sensor" in graph_attrs: sensor = g["sensor"]
        else: sensor = ""
        if "source" in graph_attrs: source = g["source"]
        else: source = ""
        if "region" in graph_attrs: region = g["region"]
        else: region = ""
        if "project" in graph_attrs: project = g["project"]
        else: project = ""

        #url = "http://openconnecto.me/mrdata/graphs/"+("/".join(graph_fn.replace("\\", "/").split('/')[-2:]))
        url = get_download_path(graph_fn)

        # This statement puts each graph into the DB
        qry_stmt = "insert into %s.%s values (\"%s\",\"%s\",\"%s\",\"%s\",%d,%d,\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",%f,\"%s\");" \
             % (db_args["default"]["NAME"], tb_name, os.path.abspath(graph_fn), genus, region, project, 
                 np.int64(np.float64(vcount)), np.int64(np.float64(ecount)), str(graph_attrs)[1:-1].replace("'",""), 
                 str(vertex_attrs)[1:-1].replace("'",""),
                 str(edge_attrs)[1:-1].replace("'",""), sensor, source, mtime, url)

        cursor.execute(qry_stmt)
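
_ingest_files relies on a module-level db_args dict; its shape can be inferred from the key accesses above. A sketch with hypothetical values:

db_args = {
    "default": {
        "HOST": "localhost",   # MySQL host
        "USER": "graphuser",   # MySQL user
        "PASSWORD": "secret",  # MySQL password
        "NAME": "graphdb",     # database (schema) name
    }
}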
Example #5
def convert_graph_prog(request, webargs):
    if (request.method == 'POST' and webargs):
        split_webargs = webargs.split("/")
        link_only = False
        to_email = split_webargs[0]

        if (not check_email(to_email)):
            return HttpResponse("ERROR: Incorrect email address format")

        try:
            in_graph_format = split_webargs[1]
            if in_graph_format not in ("graphml", "ncol", "edgelist", "lgl",
                                       "pajek", "graphdb", "numpy", "mat"):
                return HttpResponse("ERROR: Unknown graph input format")

            out_graph_format = list(set(split_webargs[2].split(",")))
            if not out_graph_format:
                return HttpResponse(
                    "ERROR: No output formats to compute provided")

            if len(split_webargs) == 4:
                if split_webargs[3] != "l":
                    return HttpResponse(
                        "ERROR: Final parameter '{0}', expected 'l'".format(
                            split_webargs[3]))
                else:
                    link_only = True
        except:  # catches IndexError from a malformed webargs path, among others
            return HttpResponse(
                "ERROR: Error with input graph format OR invariants chosen")

        save_dir, convert_file_save_loc = getworkdirs()
        uploaded_files = writeBodyToDisk(
            request.body, save_dir)  # can only be one file # TODO: Check me

        # Check for zip
        if os.path.splitext(uploaded_files[0])[1].strip() == '.zip':
            unzip(uploaded_files[0], save_dir)
            # Delete the zip so it's not included in the graphs we uploaded
            os.remove(uploaded_files[0])
            uploaded_files = glob(os.path.join(
                save_dir, "*"))  # get the uploaded file names

        task_convert.delay(settings.MEDIA_ROOT, uploaded_files,
                           convert_file_save_loc, in_graph_format,
                           out_graph_format, to_email)

        if link_only:
            return HttpResponse(get_download_path(convert_file_save_loc))

        return HttpResponse("Successful job submission, please " \
                              "await reception & completion emails at {0}".format(to_email))
    else:
        return HttpResponse("There was an error! If you believe it " \
                              "is on our end please email: {0}".format(settings.DEFAULT_FROM_EMAIL))
Example #6
def process_input_data(derivatives, graph_loc, graphsize, invariants, proj_dir,
                       to_email):
    '''
    Extract file names & determine which file corresponds to what for gengraph
    '''
    # Legacy session-unpickling code, kept here disabled:
    #   if isinstance(session, str) or isinstance(session, unicode):
    #     f = open(session, "rb")
    #     session = pickle.load(f)
    #     f.close()

    filesInUploadDir = os.listdir(derivatives)

    fiber_fn, data_atlas_fn = checkFileExtGengraph(
        filesInUploadDir)  # Check & sort files
    ''' Fully qualify file names '''
    fiber_fn = os.path.join(derivatives, fiber_fn)

    if not data_atlas_fn:
        data_atlas_fn = settings.ATLASES.keys()[0]
    else:
        data_atlas_fn = os.path.join(derivatives, data_atlas_fn)

    print "data_atlas_fn %s ..." % data_atlas_fn

    Gfn = os.path.join(graph_loc, getFiberID(fiber_fn))  # partial name
    if graphsize.lower().startswith("s"):
        Gfn += "smgr.graphml"
        graphsize = False  # False is small graph
    elif graphsize.lower().startswith("b"):
        Gfn += "bggr.graphml"
        graphsize = True  # True is big graph
    else:
        print '[ERROR]: Graph size unknown'  # should never happen

    try:
        genGraph(fiber_fn, data_atlas_fn, Gfn, graphsize,
                 **settings.ATLASES)  # FIXME: numfibers = 20000 for tests
    except:
        msg = "Hello,\n\nYour most recent job failed either because your fiber streamline file or ROI mask was incorrectly formatted."
        msg += " Please check both and try again.\n\n"
        sendJobFailureEmail(to_email, msg)
        return 911

    # Run invariants here
    if len(invariants) > 0:
        print "Computing invariants {0}".format(invariants)

        invariant_fns = run_invariants(invariants, Gfn, graph_loc)

    dwnld_loc = get_download_path(proj_dir)
    sendJobCompleteEmail(to_email, dwnld_loc)
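
A hedged invocation sketch; the directories and invariant list are hypothetical, and any graphsize string starting with "s" selects the small-graph branch:

process_input_data("/tmp/upload123",           # derivatives: uploaded-files dir
                   "/srv/media/graphs",        # graph_loc
                   "small",                    # graphsize -> small-graph branch
                   ["degree"],                 # invariants (may be empty)
                   "/srv/media/projects/j1",   # proj_dir
                   "user@example.com")         # to_email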
Example #7
def process_input_data(derivatives, graph_loc, graphsize, invariants, 
                        proj_dir, to_email):
  '''
  Extract file names & determine which file corresponds to what for gengraph
  '''
  # Legacy session-unpickling code, kept here disabled:
  #   if isinstance(session, str) or isinstance(session, unicode):
  #     f = open(session, "rb")
  #     session = pickle.load(f)
  #     f.close()

  filesInUploadDir = os.listdir(derivatives)

  fiber_fn, data_atlas_fn = checkFileExtGengraph(filesInUploadDir) # Check & sort files

  ''' Fully qualify file names '''
  fiber_fn = os.path.join(derivatives, fiber_fn)
  
  if not data_atlas_fn:
    data_atlas_fn = settings.ATLASES.keys()[0]
  else:
    data_atlas_fn = os.path.join(derivatives, data_atlas_fn)

  print "data_atlas_fn %s ..." % data_atlas_fn

  Gfn = os.path.join(graph_loc, getFiberID(fiber_fn)) # partial name
  if graphsize.lower().startswith("s"):
    Gfn += "smgr.graphml"
    graphsize = False # False is small graph
  elif graphsize.lower().startswith("b"):
    Gfn += "bggr.graphml"
    graphsize = True # True is big graph
  else:
    print '[ERROR]: Graph size unknown' # should never happen
   
  try:
    genGraph(fiber_fn, data_atlas_fn, Gfn, graphsize, **settings.ATLASES) # FIXME: numfibers = 20000 for tests
  except:
    msg = "Hello,\n\nYour most recent job failed either because your fiber streamline file or ROI mask was incorrectly formatted."
    msg += " Please check both and try again.\n\n"
    sendJobFailureEmail(to_email, msg)
    return 911

  # Run invariants here
  if len(invariants) > 0:
    print "Computing invariants {0}".format(invariants)

    invariant_fns = run_invariants(invariants, Gfn, graph_loc)

  dwnld_loc = get_download_path(proj_dir)
  sendJobCompleteEmail(to_email, dwnld_loc)
Example #8
def convert_graph_prog(request, webargs):
  if(request.method == 'POST' and webargs):
    split_webargs = webargs.split("/")
    link_only = False
    to_email = split_webargs[0]

    if (not check_email(to_email)):
      return HttpResponse("ERROR: Incorrect email address format")

    try:
      in_graph_format = split_webargs[1]
      if in_graph_format not in ("graphml", "ncol", "edgelist", "lgl", "pajek", "graphdb", "numpy", "mat"):
        return HttpResponse("ERROR: Unknown graph input format")

      out_graph_format = list(set(split_webargs[2].split(",")))
      if not out_graph_format: 
        return HttpResponse("ERROR: No output formats to compute provided")

      if len(split_webargs) == 4:
        if split_webargs[3] != "l":
          return HttpResponse("ERROR: Final parameter '{0}', expected 'l'".format(split_webargs[3]))
        else:
          link_only = True
    except:  # catches IndexError from a malformed webargs path, among others
      return HttpResponse("ERROR: Error with input graph format OR invariants chosen")


    save_dir, convert_file_save_loc = getworkdirs()
    uploaded_files = writeBodyToDisk(request.body, save_dir)# can only be one file # TODO: Check me

    # Check for zip
    if os.path.splitext(uploaded_files[0])[1].strip() == '.zip':
      unzip(uploaded_files[0], save_dir)
      # Delete the zip so it's not included in the graphs we uploaded
      os.remove(uploaded_files[0])
      uploaded_files = glob(os.path.join(save_dir, "*")) # get the uploaded file names

    task_convert.delay(settings.MEDIA_ROOT, uploaded_files, convert_file_save_loc,
                       in_graph_format, out_graph_format, to_email)

    if link_only:
      return HttpResponse(get_download_path(convert_file_save_loc))

    return HttpResponse("Successful job submission, please " \
                          "await reception & completion emails at {0}".format(to_email))
  else:
    return HttpResponse("There was an error! If you believe it " \
                          "is on our end please email: {0}".format(settings.DEFAULT_FROM_EMAIL))
Example #9
def download(request):
  MAX_NUM_GRAPH_DLS = 1
  ATLASES = {"desikan": os.path.join(settings.ATLAS_DIR, "desikan_atlas.nii") ,
              "slab": os.path.join(settings.ATLAS_DIR, "slab_atlas.nii")}

  if request.method == "POST":
    if request.POST.keys()[0] == "query_type": # Means we are doing a search
      form = DownloadQueryForm(request.POST)
      if form.is_valid():
        gdmof = GraphDownloadModel.objects.filter # typedef
        st = str(".*"+ ".*".join(form.cleaned_data["query"].strip().split()) +".*") # Search Term

        if form.cleaned_data["query_type"] == "all":
          table = GraphTable(
              gdmof(genus__iregex=st)      | gdmof(filepath__iregex=st) |
              gdmof(region__iregex=st)     | gdmof(numvertex__iregex=st)|
              gdmof(numedge__iregex=st)    | gdmof(graphattr__iregex=st)|
              gdmof(vertexattr__iregex=st) |  gdmof(edgeattr__iregex=st)|
              gdmof(sensor__iregex=st)     | gdmof(source__iregex=st)   |
              gdmof(project__iregex=st)
           )
        elif form.cleaned_data["query_type"] == "attribute":
          table = GraphTable(
              gdmof(graphattr__iregex=st)| gdmof(vertexattr__iregex=st)|
              gdmof(edgeattr__iregex=st)
           )
        elif form.cleaned_data["query_type"] == "name":
          table = GraphTable( gdmof(filepath__iregex=st) )
        elif form.cleaned_data["query_type"] == "genus":
          table = GraphTable( gdmof(genus__iregex=st) )
        elif form.cleaned_data["query_type"] == "region":
          table = GraphTable( gdmof(region__iregex=st) )
        elif form.cleaned_data["query_type"] == "project":
          table = GraphTable( gdmof(project__iregex=st) )

        # NOTE: Or equal to as well
        elif form.cleaned_data["query_type"] == "numvertex_gt":
          table = GraphTable( gdmof(numvertex__gte=int(form.cleaned_data["query"])) )
        elif form.cleaned_data["query_type"] == "numedge_gt":
          table = GraphTable( gdmof(numedge__gte=int(form.cleaned_data["query"])) )

        elif form.cleaned_data["query_type"] == "numvertex_lt":
          table = GraphTable( gdmof(numvertex__lte=int(form.cleaned_data["query"])) )
        elif form.cleaned_data["query_type"] == "numedge_lt":
          table = GraphTable( gdmof(numedge__lte=int(form.cleaned_data["query"])) )

        elif form.cleaned_data["query_type"] == "sensor":
          table = GraphTable( gdmof(sensor__iregex=st) )
        elif form.cleaned_data["query_type"] == "source":
          table = GraphTable( gdmof(source__iregex=st) )

        if (len(table.rows) == 0):
          table = None # Get the no results message to show up
        else:
          table.set_html_name("Search Results")

        return render_to_response("downloadgraph.html", {"genera":[],
          "query_result":table}, context_instance=RequestContext(request))
      else:
        return HttpResponseRedirect(get_script_prefix()+"download")

    else: # We just want to download specific files

      form = DownloadGraphsForm(request.POST)

      if form.is_valid():
        selected_files = request.POST.getlist("selection")
        ds_factor = 0 if not request.POST.get("ds_factor") else request.POST.get("ds_factor")

        if ds_factor not in ATLASES.keys():
          ds_factor = int(ds_factor)

        dl_format = form.cleaned_data["dl_format"]

        if not selected_files:
          return HttpResponseRedirect(get_script_prefix()+"download")

        # Something selected for dl/Convert+dl
        else:
          data_dir = os.path.join(settings.MEDIA_ROOT, "public",
                                 strftime("download_%a%d%b%Y_%H.%M.%S/", localtime()))
          dwnld_loc = get_download_path(data_dir)

          sendEmail(form.cleaned_data["Email"], "Job launch notification",
                  "Your download request was received. You will receive an email when it completes.\n\n")

          task_scale.delay(selected_files, dl_format, ds_factor, ATLASES,
                           form.cleaned_data["Email"], dwnld_loc,
                           os.path.join(data_dir, "archive.zip"))  # Testing only

          request.session['success_msg'] = \
"""
Your job successfully launched. You should receive an email when your job begins and another 
one when it completes.<br/> The process may take several hours (dependent on graph size). 
If your job fails you will receive an email notification as well.<br/>
If you do not see an email in your <i>Inbox</i> check the <i>Spam</i> folder and add 
<code>[email protected]</code> to your safe list.
"""
          return HttpResponseRedirect(get_script_prefix()+'success')
      else:
        return HttpResponseRedirect(get_script_prefix()+"download")

  else:
    tbls = []
    for genus in settings.GENERA:
      table = GraphTable(GraphDownloadModel.objects.filter(genus=genus))
      table.set_html_name(genus.capitalize()) # Set the html __repr__
      # TODO: Alter per_page limit to +25
      RequestConfig(request, paginate={"per_page":25}).configure(table) # Each table re-render given a request
      #table.columns["url"].header = "Download Link"

      dl_form = DownloadGraphsForm()
      dl_form.set_name(genus)

      tbls.append((table, dl_form))

  return render_to_response("downloadgraph.html", {"genera":tbls, "query":DownloadQueryForm()},
                            context_instance=RequestContext(request))
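
The search branch of the view dispatches on the first POST key being "query_type". A hypothetical payload for a region search (field names follow the cleaned_data keys used above):

payload = {
    "query_type": "region",  # selects the region__iregex branch
    "query": "cortex",       # becomes the regex .*cortex.*
}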
Example #10
def _ingest_files(fns, genus, tb_name):

    print "Connecting to database %s ..." % db_args["default"]["NAME"]
    db = MySQLdb.connect(host=db_args["default"]["HOST"],
                         user=db_args["default"]["USER"],
                         passwd=db_args["default"]["PASSWORD"],
                         db=db_args["default"]["NAME"])
    db.autocommit(True)

    with closing(db.cursor()) as cursor:
        cursor.connection.autocommit(True)

        for graph_fn in fns:
            print "Processing %s ..." % graph_fn
            mtime = os.stat(graph_fn).st_mtime  # get modification time
            g_changed = True
            # Check whether the graph is already in the DB and, if so, whether
            # it has been modified since ingest. NOTE: file paths are
            # interpolated directly into the SQL, so this assumes trusted input.
            test_qry = "select g.mtime from %s.%s as g where g.filepath = \"%s\";" % (
                db_args["default"]["NAME"], tb_name, graph_fn)

            if cursor.execute(test_qry):  # Means the graph is already in the DB
                if cursor.fetchall()[0][0] == mtime:  # Graph hasn't changed since ingest
                    g_changed = False
                    print "Ignoring %s ..." % graph_fn
                else:
                    cursor.execute(
                        "delete from %s.%s where filepath = \"%s\";" %
                        (db_args["default"]["NAME"], tb_name, graph_fn))
                    print "  ===> Updating %s ..." % graph_fn

            if g_changed:  # Means graph has changed since ingest OR was never in DB to start with
                # Collect all the attributes etc ..
                g = igraph_io.read_arbitrary(graph_fn,
                                             informat="graphml",
                                             headers_only=True)

                vertex_attrs = g.vs.attribute_names()
                edge_attrs = g.es.attribute_names()
                graph_attrs = g.attributes()
                vcount = g.vcount()
                ecount = g.ecount()
                # Give some default values if none exist
                if "sensor" in graph_attrs: sensor = g["sensor"]
                else: sensor = ""
                if "source" in graph_attrs: source = g["source"]
                else: source = ""
                if "region" in graph_attrs: region = g["region"]
                else: region = ""
                if "project" in graph_attrs: project = g["project"]
                else: project = ""

                #url = "http://openconnecto.me/mrdata/graphs/"+("/".join(graph_fn.replace("\\", "/").split('/')[-2:]))
                url = get_download_path(graph_fn)

                # This statement puts each graph into the DB
                qry_stmt = "insert into %s.%s values (\"%s\",\"%s\",\"%s\",\"%s\",%d,%d,\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",%f,\"%s\");" \
                     % (db_args["default"]["NAME"], tb_name, os.path.abspath(graph_fn), genus, region, project,
                         np.int64(np.float64(vcount)), np.int64(np.float64(ecount)), str(graph_attrs)[1:-1].replace("'",""),
                         str(vertex_attrs)[1:-1].replace("'",""),
                         str(edge_attrs)[1:-1].replace("'",""), sensor, source, mtime, url)

                cursor.execute(qry_stmt)