def graph_update(fl_name):
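    """Refresh the paper graph for the uploaded archive `fl_name`.

    Fetches the paper's arXiv metadata, (re)builds its node and reference
    list via prep_node when the node is missing or incomplete, propagates
    the score increment to each referenced node, and re-runs the graph
    scoring step. Returns a dict with the compressed title ('T') and the
    updated-node count ('C'), or None when the title cannot be compressed.
    """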
    disp_data = {}
    update_queue = queue.Queue()
    ppr_id = fl_name.replace('.zip','')
    meta_res = get_arxiv_meta.call_api(ppr_id)

    c_title = hashish.compress(meta_res['tit'])
    if not c_title:
        return None

    disp_data['T'] = c_title
    ref_list = []
    updated_node_count = 0
    rp_node = graph.find_one("Paper", "id", hashish.get_hash(c_title))


    if not rp_node or rp_node['complete'] == "F":
        # path = app.config['UPLOAD_FOLDER']
        path = '/home/beingcooper/Desktop/authrank/uploads'

        ref_list = prep_node.prep_node(graph, fl_name, path, ppr_id, meta_res, True)

    # ref_list[0] carries the score increment to propagate to each referenced
    # node; default to 0 when nothing was (re)prepared.
    add_score = ref_list[0] if ref_list else 0

    updated_node_count += 1
    for ref_node in ref_list[1:]:
        ref_node_score = ref_node['q_score']

        # Add the score increment to this referenced node and persist it.
        ref_node['q_score'] = ref_node_score + add_score
        ref_node.push()
        update_queue.put((ref_node, ref_node_score, ref_node['q_score']))
        updated_node_count += 1

    if updated_node_count:
        updated_node_count = update_graph_algo(update_queue, updated_node_count)


    disp_data['C'] = updated_node_count
    return disp_data
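

# Seed the graph: walk every sub-directory of `seed`, take the first archive
# found in each leaf directory, fetch its arXiv metadata, and create its
# paper node via prep_node.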
for f1 in os.listdir(seed):
    in1 = os.path.join(seed, f1)

    for f2 in os.listdir(in1):
        refs = []
        text = ""
        available = False
        in2 = os.path.join(in1, f2)
        items = os.listdir(in2)
        item_count = len(items)
        try:
            # Use the first archive found in this directory as the seed paper.
            f3 = items[0]
            ppr_id = f3.replace(".zip", "")
            meta_res = get_arxiv_meta.call_api(ppr_id)

            prep_node.prep_node(graph, f3, in2, ppr_id, meta_res)
        except Exception:
            continue
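
# NOTE: the triple-quoted string below appears to be an earlier inline version
# of the node-creation logic, disabled by wrapping it in a string literal.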

"""

        ppr_id = f3.replace('.zip','')
        meta_res = get_arxiv_meta.call_api(ppr_id)

        # Node creation

        c_title = hashish.compress(meta_res['tit'])
        rp_node = graph.find_one("Paper", "id", hashish.get_hash(c_title))
        if rp_node:
            rp = rp_node
        else: