def getMarvelData(k=30, graph=marvelURL):

    # load graph data
    g = tlp.loadGraph(graph)

    # compute node degree
    viewMetric = g.getDoubleProperty("viewMetric")
    g.applyDoubleAlgorithm("Degree", viewMetric)

    viewIcon = g.getStringProperty("viewIcon")
    viewLabel = g.getStringProperty("viewLabel")
    degree = {}
    for n in viewIcon.getNodesEqualTo("md-human"):
        degree[viewLabel[n]] = int(viewMetric[n])
    best = OrderedDict(sorted(degree.items(), key=lambda t: t[1],
                              reverse=True))
    bestk = list(islice(best.items(), k))

    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("name", "degree"))
    for n in bestk:
        writer.writerow(n)

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_marvel.csv"
    output.headers["Content-type"] = "text/csv"
    return output
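
# A minimal usage sketch (not from the original project): exposing getMarvelData()
# as a Flask route. The `app` object and the route path below are assumptions for
# illustration, and getMarvelData() relies on the module-level marvelURL constant
# from the original project, which is not shown here. Since getMarvelData() already
# builds its response with flask.make_response, the view can simply return it.
from flask import Flask

app = Flask(__name__)


@app.route("/marvel/top-degrees")
def marvel_top_degrees():
    # getMarvelData() is defined above and returns a text/csv response
    return getMarvelData(k=30)
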
def convertGraphToD3Static(url):
    g = tlp.loadGraph(url)
    name = g.getStringProperty("name")
    viewLayout = g.getLayoutProperty("viewLayout")

    nodes = []
    links = []

    for n in g.getNodes():
        node = {}
        node["id"] = name[n]
        node["x"] = viewLayout[n][0]
        node["y"] = viewLayout[n][1]
        nodes.append(node)

    for e in g.getEdges():
        link = {}
        source = {}
        target = {}
        source["x"] = viewLayout[g.source(e)][0]
        source["y"] = viewLayout[g.source(e)][1]
        target["x"] = viewLayout[g.target(e)][0]
        target["y"] = viewLayout[g.target(e)][1]
        link["source"] = source
        link["target"] = target
        link["sourceLabel"] = name[g.source(e)]
        link["targetLabel"] = name[g.target(e)]
        links.append(link)

    graph = {}
    graph['links'] = links
    graph['nodes'] = nodes

    return graph
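
# A small usage sketch, assuming a Tulip file "marvel.tlp" exists on disk (the file
# name is hypothetical): convertGraphToD3Static() returns a plain dict with "nodes"
# and "links", so it can be dumped straight to the JSON format D3 force layouts expect.
import json

if __name__ == "__main__":
    d3_graph = convertGraphToD3Static("marvel.tlp")
    with open("marvel_d3.json", "w") as fp:
        json.dump(d3_graph, fp)
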
def getCloud():

    # load graph data
    g = tlp.loadGraph('spiderman.tlpb')

    #compute node degree
    metricprop = g.getDoubleProperty("viewMetric")
    g.applyDoubleAlgorithm("Degree", metricprop)
    #g.applyDoubleAlgorithm("Betweenness Centrality", metricprop)

    # get the 50 characters with the highest Degree (most published)
    picon = g.getStringProperty("viewIcon")
    plabel = g.getStringProperty("viewLabel")
    val = {}
    for n in picon.getNodesEqualTo("md-human"):
        val[plabel[n]] = int(metricprop[n])
    best = OrderedDict(sorted(val.items(), key=lambda t: t[1], reverse=True))
    best50 = list(islice(best.items(), 50))

    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("name", "val"))
    for n in best50:
        writer.writerow(n)

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_wc_marvel.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def radial_chart():
    global hero
    if request.method == 'POST':
        hero = request.form.get('heroes')

    list_heroes = []
    g = tlp.loadGraph('marvel.tlpb')

    # compute node degree
    metricprop = g.getDoubleProperty("viewMetric")
    g.applyDoubleAlgorithm("Degree", metricprop)

    # get the 50 characters with the highest Degree (most published)
    picon = g.getStringProperty("viewIcon")
    plabel = g.getStringProperty("viewLabel")
    i = 0
    # iterate in descending order (max to min)
    for n in metricprop.getSortedNodes(None, False):
        if picon[n] == "md-human":
            list_heroes.append(plabel[n])
            i = i + 1
        if (i == 50):
            break
    for i in list_heroes:
        if hero in i:
            hero = i

    list_heroes.append(hero)
    return render_template("radial_chart.html",
                           title="Les 10 personnages les plus proches de " +
                           hero,
                           list_heroes=list_heroes)
def getData_hist():
    # load graph data
    g = tlp.loadGraph('marvel.tlpb')

    # compute node degree
    metricprop = g.getDoubleProperty("viewMetric")
    g.applyDoubleAlgorithm("Degree", metricprop)

    # get 10 characters with the highest Degree (most published)
    picon = g.getStringProperty("viewIcon")
    plabel = g.getStringProperty("viewLabel")
    best10 = []
    i = 0
    # iterate in descending order (max to min)
    for n in metricprop.getSortedNodes(None, False):
        if picon[n] == "md-human":
            best10.append((plabel[n], metricprop[n]))
            i += 1
        if i == 10:
            break
    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("name", "val"))
    for n in best10:
        writer.writerow(n)

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=getData_hist.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def getDataWordCloud():
    global hero
    global nb_heroes
    global best_heroes
    list_heroes = []
    edge_value = 0
    word_cloud = []
    heroes_graph = tlp.loadGraph('heroes_final.tlpb')
    node = heroes_graph['viewLabel'].getNodesEqualTo(hero).next()
    heroes_graph['viewSelection'].setAllNodeValue(False)
    heroes_graph['viewSelection'][node] = True
    params = tlp.getDefaultPluginParameters('Reachable SubGraph', heroes_graph)
    params['distance'] = 1
    params['edge direction'] = 'all edges'
    heroes_graph.applyBooleanAlgorithm('Reachable SubGraph', params)
    heroes_graph.addSubGraph(heroes_graph['viewSelection'],
                             name="SubGraph_hero")
    SubGraph_hero = heroes_graph.getSubGraph("SubGraph_hero")
    # compute node degree
    metricprop = SubGraph_hero.getDoubleProperty("viewMetric")
    SubGraph_hero.applyDoubleAlgorithm("Degree", metricprop)

    # get the nb_heroes characters with the highest Degree (most published)
    picon = SubGraph_hero.getStringProperty("viewIcon")
    plabel = SubGraph_hero.getStringProperty("viewLabel")
    i = 0
    # iterate in descending order (max to min)
    for n in metricprop.getSortedNodes(None, False):
        if picon[n] == "md-human" and plabel[n] in best_heroes:
            if (plabel[node] == hero and plabel[n] != hero):
                for edge in tlp.Graph.allEdges(SubGraph_hero, node):
                    if tlp.Graph.source(SubGraph_hero,
                                        edge) == n or tlp.Graph.target(
                                            SubGraph_hero, edge) == n:
                        edge_value = tlp.Graph.getEdgePropertiesValues(
                            SubGraph_hero, edge)['value']
                        break
                word_cloud.append((plabel[node], plabel[n], edge_value))

        if (plabel[node] == hero and plabel[n] == hero):
            i -= 1
        i += 1
        if (i == nb_heroes):
            break

    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("group", "name", "val"))
    for n in word_cloud:
        writer.writerow(n)

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_word_cloud.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def main(file, graph):
    graph = tlp.loadGraph(file, graph)
    ELP = graph.getDoubleProperty("ELP")
    Weigth = graph.getDoubleProperty("Weigth")
    Voisin = graph.getIntegerProperty("Voisin")
    Classe = graph.getStringProperty("Classe")
    viewBorderColor = graph.getColorProperty("viewBorderColor")
    viewBorderWidth = graph.getDoubleProperty("viewBorderWidth")
    viewColor = graph.getColorProperty("viewColor")
    viewFont = graph.getStringProperty("viewFont")
    viewFontSize = graph.getIntegerProperty("viewFontSize")
    viewIcon = graph.getStringProperty("viewIcon")
    viewLabel = graph.getStringProperty("viewLabel")
    viewLabelBorderColor = graph.getColorProperty("viewLabelBorderColor")
    viewLabelBorderWidth = graph.getDoubleProperty("viewLabelBorderWidth")
    viewLabelColor = graph.getColorProperty("viewLabelColor")
    viewLabelPosition = graph.getIntegerProperty("viewLabelPosition")
    viewLayout = graph.getLayoutProperty("viewLayout")
    viewMetric = graph.getDoubleProperty("viewMetric")
    viewRotation = graph.getDoubleProperty("viewRotation")
    viewSelection = graph.getBooleanProperty("viewSelection")
    viewShape = graph.getIntegerProperty("viewShape")
    viewSize = graph.getSizeProperty("viewSize")
    viewSrcAnchorShape = graph.getIntegerProperty("viewSrcAnchorShape")
    viewSrcAnchorSize = graph.getSizeProperty("viewSrcAnchorSize")
    viewTexture = graph.getStringProperty("viewTexture")
    viewTgtAnchorShape = graph.getIntegerProperty("viewTgtAnchorShape")
    viewTgtAnchorSize = graph.getSizeProperty("viewTgtAnchorSize")

    for e in graph.getEdges():
        Weigth[e] = viewMetric[e]
        ELP[e] = (1 - viewMetric[e])
        # print(graph.getEdgePropertiesValues(e))
        # print(ELP[e])
        if viewColor[graph.source(e)] == viewColor[graph.target(e)]:
            viewColor[e] = viewColor[graph.source(e)]
        # print(viewColor[graph.source(e)])
        # print(viewColor[graph.target(e)])

    for n in graph.getNodes():
        print(n)
        vois = 0
        if viewColor[n] == (200, 0, 0, 255):
            Classe[n] = 'frontier'
        elif viewColor[n] == (0, 200, 0, 255):
            Classe[n] = '1'
        else:
            Classe[n] = '0'
        for i in graph.getInOutEdges(n):
            vois += 1
        Voisin[n] = vois
        # print(viewColor[n])
        # print(ELP[e])

    return graph
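
# A hedged usage sketch for main() above: tlp.loadGraph(filename, graph) fills an
# existing graph instead of creating a new one, so an empty graph can be passed in.
# The file names below are assumptions for illustration only.
from tulip import tlp

if __name__ == "__main__":
    container = tlp.newGraph()
    annotated = main("classified_network.tlp", container)
    tlp.saveGraph(annotated, "classified_network_out.tlp")
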
def getTest():
    # # route used to test that things behave the way we expect

    # # test whether graphe_marvel_avec_comics works as expected

    # g = tlp.loadGraph('marvel.tlpb')
    # plabel = g.getStringProperty("viewLabel")

    # dico_nodes = graphe_marvel_avec_comics(g)
    # print(len(dico_nodes))

    # s = 0
    # for n in dico_nodes.keys():  # the keys of dico_nodes are the heroes
    #     print(len(dico_nodes[n]))
    #     s += 1
    # print("now compare s and dico_nodes")
    # print(s)
    # print(len(dico_nodes))
    # # dico_nodes is therefore non-empty; this works

    # # Test that suppression_node works
    # # We remove Spider-Man from his own universe

    # personnage = "Spider-Man"
    # (subG,node_p) = get_graph_from_char(g,personnage)
    # plabel = g.getStringProperty("viewLabel")
    # dico_nodes = graphe_marvel_sans_comics(subG)  # works
    # color =  g.getColorProperty("viewColor")
    # (nodes,nodes_id) = creation_nodes(plabel,dico_nodes,color)  # works
    # links = liens(dico_nodes,nodes_id)

    # (dico_nodes_apres_suppression, links_apres_suppression) = suppression_node(dico_nodes,nodes_id,links, node_p)

    # output = ""
    # for n in dico_nodes.keys():
    #     output = output + plabel[n] + "<br>"
    # print("Number of characters left in dico_nodes after removal:")
    # print(len(dico_nodes))
    # # Looking at the heroes present after removal, Spider-Man is indeed gone
    # # len(dico_nodes) has indeed decreased by 1
    # # We should also check that the same holds for links

    graphe_marvel_tulip()
    g = tlp.loadGraph('heroes.tlpb')
    tlp.saveGraph(g, "heroes.tlpx")

    plabel = g.getStringProperty("viewLabel")

    output = ""
    compteur = 0
    for n in g.getNodes():
        output = output + plabel[n] + "<br>"
        compteur += 1
    print(compteur)
    return output
def getTree():
    # Here we compute the degree of each character in the universe, as well as the sum of these degrees.
    # We then build a dictionary ordered by degree value
    # and take enough characters to cover 50% of the total degree sum (the code below currently uses a third).

    # load graph data
    g = tlp.loadGraph('marvel.tlpb')

    #compute node degree
    metricprop = g.getDoubleProperty("viewMetric")
    g.applyDoubleAlgorithm("Degree", metricprop)

    # compute each hero's degree and accumulate the total degree
    picon = g.getStringProperty("viewIcon")
    plabel = g.getStringProperty("viewLabel")
    val = {}
    deg_total = 0
    for n in picon.getNodesEqualTo("md-human"):
        val[plabel[n]] = int(metricprop[n])
        deg_total = deg_total + int(metricprop[n])

    best = OrderedDict(sorted(val.items(), key=lambda t: t[1], reverse=True))

    #half = deg_total/2
    half = deg_total / 3

    total = 0
    main_char = []
    while (total < half):
        # take the most important characters first
        el = best.popitem(last=False)
        # key, value == name, degree
        main_char.append([el[0], el[1]])
        total = total + el[1]

    # note: this way we can overshoot `half`, and thus end up with more than half of the publications

    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("name", "parent", "value"))
    writer.writerow(("Marvel", "", ""))
    for h in main_char:
        nom = str(h[0])
        score = str(h[1])
        #writer.writerow((nom,"personnages principaux",score))
        writer.writerow((nom, "Marvel", score))

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_wc_marvel.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def getForce():
    g = tlp.loadGraph('marvel.tlpb')  # works

    #### Temporary ####
    personnage = "Spider-Man"
    (subG, node_p) = get_graph_from_char(g, personnage)
    ####################

    # This block should be replaced once the user can choose the character themselves

    plabel = g.getStringProperty("viewLabel")
    #dico_nodes = graphe_marvel_sans_comics(g)  # works
    dico_nodes = graphe_marvel_sans_comics(subG)  # works

    color = g.getColorProperty("viewColor")
    (nodes, nodes_id) = creation_nodes(plabel, dico_nodes, color)  # works
    links = liens(dico_nodes, nodes_id)

    # Not sure we actually want to map the node size here
    #nodes = set_size(nodes,nodes_id,dico_nodes)

    ##### Temporary #####
    # show the characters from Spider-Man's friends graph
    # take the nodes with the highest degree
    nodes = sorted(nodes, key=lambda t: t['size'], reverse=True)
    #nodes_sliced = list(islice(nodes, 50))  # superseded by the wider slice below
    nodes_sliced = list(islice(nodes, 150))
    nodes_sliced = sorted(nodes_sliced, key=lambda t: t['size'], reverse=False)
    nodes_sliced = list(islice(nodes_sliced, 60))

    # keep only the heroes we are interested in
    id_sliced_nodes = []
    for el in nodes_sliced:
        id_sliced_nodes.append(el["id"])
    links_sliced = []
    for el in links:  # el is a dictionary
        if ((el["source"] in id_sliced_nodes)
                and (el["target"] in id_sliced_nodes)):
            links_sliced.append(el)

    ##### Temporary #####

    dico = {}
    dico["nodes"] = nodes_sliced
    dico["links"] = links_sliced

    resp = json.dumps(dico)  # resp = json.dumps(nodes) + json.dumps(links)

    output = make_response(resp)
    output.headers[
        "Content-Disposition"] = "attachment; filename=force_directed_graph.json"
    output.headers["Content-type"] = "json"
    return output
def actorsHeatmap(k=40):
    k = 40 if k < 10 or k > 80 else k

    g = tlp.loadGraph(FRNetworkURL)
    name = g.getStringProperty("name")
    viewMetric = g.getDoubleProperty("viewMetric")
    value = g.getIntegerProperty("value")

    degree = g.getDoubleProperty("degree")
    params = tlp.getDefaultPluginParameters("Degree", g)
    params['metric'] = value
    g.applyDoubleAlgorithm("Degree", degree, params)

    topActors = []
    actors = []

    i = 0
    for n in viewMetric.getSortedNodes(ascendingOrder=False):
        if (i == k):
            break
        else:
            i = i + 1
            topActors.append(n)
            actor = {}
            actor['name'] = name[n]
            actors.append(actor)

    data = []
    for i in range(k):
        total = 0
        for j in range(k):
            row = {}
            row['actor1'] = name[topActors[i]]
            row['actor2'] = name[topActors[j]]
            row['collaborations'] = 0
            for e in g.getEdges(topActors[i], topActors[j]):
                row['collaborations'] += value[e]
                total += value[e]
            for e in g.getEdges(topActors[j], topActors[i]):
                row['collaborations'] += value[e]
                total += value[e]
            data.append(row)
        data[i * k + i]['collaborations'] = total  # diagonal values
        actors[i]['count'] = total

    return render_template(
        "acteurs_heatmap.html",
        title=
        "Heatmap des acteurs et actrices francophones les plus prolifiques",
        actors=actors,
        data=data)
def getRatio():
    #load graph data
    graph = tlp.loadGraph('marvel.tlpb')
    dico_nodes = {}
    dico_nodes_0_doublon = {}
    # dictionary whose keys are nodes
    # and whose values are the distance-2 adjacency list of that node.
    picon = graph.getStringProperty("viewIcon")
    # We only care about the heroes
    for n in picon.getNodesEqualTo("md-human"):
        dico_nodes[n] = []
        dico_nodes_0_doublon[n] = []
        neigh = graph.getInOutNodes(n)
        # Collect every comic the hero appears in
        for v in neigh:
            # collect every hero of that comic, except n
            # add them to the adjacency list if they are not already there
            v_neigh = graph.getInOutNodes(v)
            for u in v_neigh:
                # TODO: use a weight here instead
                if (u != n):
                    dico_nodes[n].append(u)
                    if (u not in dico_nodes_0_doublon[n]):
                        dico_nodes_0_doublon[n].append(u)

    ratio = []
    for n in dico_nodes:
        if (len(dico_nodes[n]) != 0):
            ratio.append(len(dico_nodes_0_doublon[n]) / len(dico_nodes[n]))
        else:
            ratio.append(0)

    ratio_dispersion = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

    for i in ratio:
        i = i * 10
        i = int(i)
        ratio_dispersion[i] = ratio_dispersion[i] + 1

    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")
    writer.writerow(["ratio", "quantite"])
    for n in range(len(ratio_dispersion) - 1):
        writer.writerow(([n / 10, n / 10 + 0.1], ratio_dispersion[n]))

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_histo_marvel.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def communities():
    # load graph data
    g1 = convertGraphToD3Static(communitiesURL)
    g = tlp.loadGraph(FRCinemaURL)

    actorID = g.getIntegerProperty("actorID")
    name = g.getStringProperty("name")
    original_title = g.getStringProperty("original_title")
    dates = g.getStringProperty("release_date")

    dateFormatter1 = "%d/%m/%Y"
    dateFormatter2 = "%Y-%m-%d"

    for e in g1['links']:
        actor1 = e["sourceLabel"]
        actor2 = e["targetLabel"]
        titles = []
        for n in name.getNodesEqualTo(actor1):
            src = n
        for film in g.getInOutNodes(src):
            for tgt in g.getInOutNodes(film):
                if name[tgt] == actor2:
                    if "/" in dates[film]:
                        year = datetime.strptime(dates[film], dateFormatter1)
                        year = str(year)[0:4]
                    elif "-" in dates[film]:
                        year = datetime.strptime(dates[film], dateFormatter2)
                        year = str(year)[0:4]
                    if (len(titles) == 0):
                        titles.append("<br>" + original_title[film] + " (" +
                                      year + ")")
                    else:
                        i = 0
                        while (titles[i][-5:-1] < year
                               and i + 1 < len(titles)):
                            i += 1
                        if titles[i][-5:-1] < year:
                            titles.append("<br>" + original_title[film] +
                                          " (" + year + ")")
                        else:
                            titles.insert(
                                i, "<br>" + original_title[film] + " (" +
                                year + ")")
        titles[0] = titles[0][4:]
        e['titles'] = titles

    return render_template("communities.html",
                           title="Communautés du cinéma francophone",
                           graph=g1)
def home():
    g = tlp.loadGraph('marvel.tlpb')
    global dico_nodes
    dico_nodes = graphe_marvel_sans_comics(g)
    plabel = g.getStringProperty("viewLabel")
    color = g.getColorProperty("viewColor")
    global nodes
    global nodes_id
    global links
    (nodes, nodes_id) = creation_nodes(plabel, dico_nodes, color)
    links = liens(dico_nodes, nodes_id)

    # TODO: load the graph only once here
    # and make the other functions work from that graph
    # g = tlp.loadGraph('heroes.tlpb')

    return render_template("home.html", title="Home")
def data_movies_runtime():
    g = tlp.loadGraph(FRCinemaURL)
    release_date = g.getStringProperty("release_date")
    original_title = g.getStringProperty("original_title")
    runtime = g.getIntegerProperty("runtime")

    dates = {}
    annees = {}
    runtime_films = {}

    for n in g.getNodes():
        if (original_title[n] != "" and runtime[n] > 0):
            dates[original_title[n]] = release_date[n]
            annees[original_title[n]] = release_date[n]
            runtime_films[original_title[n]] = runtime[n]

    deb, fin, annees = parseYears(dates, annees)

    runtime_moyens = []
    for a in range(deb, fin):
        runtime_moyens.append([])
        for film in annees:
            if annees[film] == a:
                runtime_moyens[a - deb].append(runtime_films[film])

    for a in range(fin - deb):
        runtime_moyens[a] = sum(runtime_moyens[a]) / len(
            runtime_moyens[a]) if len(runtime_moyens[a]) != 0 else 0

    #csv
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("year", "value"))
    for n in range(deb, fin):
        if (runtime_moyens[n - deb] != 0):
            writer.writerow((n, runtime_moyens[n - deb]))

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_movies_runtime.csv"
    output.headers["Content-type"] = "text/csv"

    return output
def get_top_10():
    # load graph data
    global hero
    heroes_graph = tlp.loadGraph('heroes_final.tlpb')
    node = heroes_graph['viewLabel'].getNodesEqualTo(hero).next()
    heroes_graph['viewSelection'].setAllNodeValue(False)
    heroes_graph['viewSelection'][node] = True
    params = tlp.getDefaultPluginParameters('Reachable SubGraph', heroes_graph)
    params['distance'] = 1
    params['edge direction'] = 'all edges'
    heroes_graph.applyBooleanAlgorithm('Reachable SubGraph', params)
    heroes_graph.addSubGraph(heroes_graph['viewSelection'],
                             name="SubGraph_hero")
    SubGraph_hero = heroes_graph.getSubGraph("SubGraph_hero")
    # compute node degree
    metricprop = SubGraph_hero.getDoubleProperty("viewMetric")
    SubGraph_hero.applyDoubleAlgorithm("Degree", metricprop)

    # get 10 characters with the highest Degree (most published)
    picon = SubGraph_hero.getStringProperty("viewIcon")
    plabel = SubGraph_hero.getStringProperty("viewLabel")
    best10 = []
    i = 0
    # iterate in descending order (max to min)
    for n in metricprop.getSortedNodes(None, False):
        if picon[n] == "md-human":
            best10.append((plabel[n], metricprop[n]))
            i += 1
        if i == 10:
            break
    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("name", "val"))
    for n in best10:
        writer.writerow(n)

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=get_top_10.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def predicting_profits():
    g = tlp.loadGraph(FRCinemaURL)

    budget = g.getIntegerProperty("budget")
    original_title = g.getStringProperty("original_title")
    popularity = g.getDoubleProperty("popularity")
    release_date = g.getStringProperty("release_date")
    revenue = g.getDoubleProperty("revenue")
    runtime = g.getIntegerProperty("runtime")
    vote_average = g.getDoubleProperty("vote_average")
    vote_count = g.getIntegerProperty("vote_count")

    nb_films = 0
    for n in g.getNodes():
        if original_title[n] != "":
            if popularity[n] > 0 and runtime[n] > 0 and vote_average[
                    n] > 0 and vote_count[n] > 0:
                nb_films = nb_films + 1
    return str(nb_films)
def graphe_marvel_tulip():
    g = tlp.loadGraph('marvel.tlpb')
    picon = g.getStringProperty("viewIcon")
    plabel = g.getStringProperty("viewLabel")

    g['viewSelection'].setAllNodeValue(False)
    # dictionary used to increase the weight on the edges
    heroes_dict = {}
    for n in g.getNodes():
        if picon[n] == 'md-book-open':
            voisins = list(g.getInOutNodes(n))
            counter = 1
            for x in voisins:
                # start at `counter` so we never revisit nodes we have already seen
                for y in voisins[counter:]:
                    edge_x_y = g.existEdge(x, y, directed=True)
                    edge_y_x = g.existEdge(y, x, directed=True)
                    # check whether one of the two edges exists
                    if edge_x_y.isValid() or edge_y_x.isValid():
                        # case 1: the edge from x to y exists
                        if edge_x_y.isValid():
                            heroes_dict = g.getEdgePropertiesValues(edge_x_y)
                            # increase the edge weight by 1
                            heroes_dict['poids'] += 1
                            g.setEdgePropertiesValues(edge_x_y, heroes_dict)
                        # case 2: the edge from y to x exists
                        if edge_y_x.isValid():
                            heroes_dict = g.getEdgePropertiesValues(edge_y_x)
                            # increase the edge weight by 1
                            heroes_dict['poids'] += 1
                            g.setEdgePropertiesValues(edge_y_x, heroes_dict)
                    else:
                        # create the edge and initialise its weight to 1
                        g.addEdge(x, y, {"poids": 1})
                counter += 1
            g.delNode(n)
    tlp.saveGraph(g, "heroes.tlpb")
def getData():
    #load graph data
    graph = tlp.loadGraph('marvel.tlpb')
    dico_nodes = {}
    dico_nodes_0_doublon = {}
    # dictionary whose keys are nodes
    # and whose values are the distance-2 adjacency list of that node.
    picon = graph.getStringProperty("viewIcon")
    # We only care about the heroes
    for n in picon.getNodesEqualTo("md-human"):
        dico_nodes[n] = []
        dico_nodes_0_doublon[n] = []
        neigh = graph.getInOutNodes(n)
        # Collect every comic the hero appears in
        for v in neigh:
            # collect every hero of that comic, except n
            # add them to the adjacency list if they are not already there
            v_neigh = graph.getInOutNodes(v)
            for u in v_neigh:
                # TODO: use a weight here instead
                if (u != n):
                    dico_nodes[n].append(u)
                    if (u not in dico_nodes_0_doublon[n]):
                        dico_nodes_0_doublon[n].append(u)
    #return([dico_nodes,dico_nodes_0_doublon])

    # We would rather have the character names here than the node objects
    #plabel = g.getStringProperty("viewLabel")

    # produce a CSV and return it
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")
    writer.writerow(["character", "nb_relat", "nb_amis"])
    for n in dico_nodes:
        writer.writerow([n, len(dico_nodes[n]), len(dico_nodes_0_doublon[n])])

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_histo_marvel.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def annealing(graph,
              cost_function,
              random_neighbour,
              temperature,
              maxsteps=1000,
              distance=.1,
              p=.9,
              debug=True):
    """ Optimize the black-box function 'cost_function' with the simulated annealing algorithm."""
    state = graph
    cost = cost_function(state)
    states, costs = [get_position(graph)], [cost]
    tlp.saveGraph(state, './BSF.tlp')
    T = temperature
    sigma_temp = 1  # TODO: change this
    for step in range(maxsteps):
        state = tlp.loadGraph('./BSF.tlp')
        fraction = step / float(maxsteps)
        # sigma_temp = np.std(costs)
        position = get_position(state)
        node = random_node(state)
        new_state, new_position, type = move_node_n3(state, position, node, p)
        new_cost = cost_function(new_state)
        if debug:
            print(
                "Step #{:>2}/{:>2} : T = {:>4.3g}, cost = {:>4.3g}, new_cost = {:>4.3g}, type {} ..."
                .format(step, maxsteps, T, cost, new_cost, type))
        if acceptance_probability(cost, new_cost, T) > rn.random():
            tlp.saveGraph(state, './BSF.tlp')
            state, cost = new_state, new_cost
            states.append(position)
            costs.append(cost)
            print("  ==> Accept it!")
        else:
            print("  ==> Reject it...")
        sigma_temp = 1  # TODO: revisit this
        T = new_temperature(T, sigma_temp, distance)
    return state, cost_function(state), states, costs
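
# The annealing() function above relies on several helpers that are not shown in this
# listing (cost_function, random_node, move_node_n3, get_position, new_temperature,
# acceptance_probability, and `rn`, presumably Python's random module or numpy.random).
# Below is a minimal sketch of a Metropolis-style acceptance rule, offered only as an
# assumption of what acceptance_probability could look like, not the original code.
import math


def acceptance_probability(cost, new_cost, temperature):
    # Always accept an improvement; otherwise accept with probability
    # exp(-(new_cost - cost) / T), the classic Metropolis criterion.
    if new_cost < cost:
        return 1.0
    if temperature <= 0:
        return 0.0
    return math.exp(-(new_cost - cost) / temperature)
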
def data_movies_nbfilms():
    g = tlp.loadGraph(FRCinemaURL)

    release_date = g.getStringProperty("release_date")
    original_title = g.getStringProperty("original_title")

    dates = {}
    annees = {}

    for n in g.getNodes():
        if (original_title[n] != ""):
            dates[original_title[n]] = release_date[n]
            annees[original_title[n]] = release_date[n]

    deb, fin, annees = parseYears(dates, annees)

    nb_films = []
    for a in range(deb, fin):
        nb = 0
        for film in annees:
            if annees[film] == a:
                nb = nb + 1
        nb_films.append(nb)

    #csv
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")

    writer.writerow(("year", "value"))
    for n in range(deb, fin):
        writer.writerow((n, nb_films[n - deb]))

    output = make_response(csvdata.getvalue())
    output.headers[
        "Content-Disposition"] = "attachment; filename=data_movies.csv"
    output.headers["Content-type"] = "text/csv"
    return output
def barplot2():
    # load graph data
    global nb_heroes
    list_heroes = []

    g = tlp.loadGraph('marvel.tlpb')

    # compute node degree
    metricprop = g.getDoubleProperty("viewMetric")
    g.applyDoubleAlgorithm("Degree", metricprop)

    # get the nb_heroes characters with the highest Degree (most published)
    picon = g.getStringProperty("viewIcon")
    plabel = g.getStringProperty("viewLabel")
    i = 0

    heroes = []
    for n in metricprop.getSortedNodes(None, False):
        if picon[n] == "md-human":
            list_heroes.append(plabel[n])
            i = i + 1
        if (i == int(nb_heroes)):
            break
    # iterate in descending order (max to min)
    i = 0
    for n in list_heroes:
        heroes.append(n)
        i += 1
        if (i == int(nb_heroes)):
            break

    comics = {}
    for n in metricprop.getSortedNodes(None, False):
        if picon[n] == "md-book-open":
            date = plabel[n].split()
            c = 0
            for b in date:
                if "(1" in b or "(2" in b:
                    date = re.sub("[()]", "", b)
                    c = c + 1
                    break
            if (c == 0):
                date = 0
            try:
                date = int(date)
            except ValueError:
                date = 0
            if (int(date) != 0):
                voisins = list(tlp.Graph.getInOutNodes(g, n))
                for x in voisins:
                    if (plabel[x] in heroes):
                        if (plabel[x] not in comics):
                            comics[plabel[x]] = {
                                "a": 0,
                                "b": 0,
                                "c": 0,
                                "d": 0,
                                "e": 0
                            }

                        comics[plabel[x]]["e"] = comics[plabel[x]]["e"] + 1
                        if (date < 1960):
                            comics[plabel[x]]["a"] = comics[plabel[x]]["a"] + 1
                        elif (1960 <= date and date < 1980):
                            comics[plabel[x]]["b"] = comics[plabel[x]]["b"] + 1
                        elif (1980 <= date and date < 2000):
                            comics[plabel[x]]["c"] = comics[plabel[x]]["c"] + 1
                        else:
                            comics[plabel[x]]["d"] = comics[plabel[x]]["d"] + 1

    best_50 = []

    for n in sorted(comics):
        l = [n]
        for m in sorted(comics[n]):
            l.append(comics[n][m])
        best_50.append(tuple(l))
    csvdata = io.StringIO()
    writer = csv.writer(csvdata, delimiter=",")
    writer.writerow(("hero", "a", "b", "c", "d", "Total"))
    for n in best_50:
        writer.writerow(n)

    output = make_response(csvdata.getvalue())
    output.headers["Content-Disposition"] = "attachment; filename=barplot2.csv"
    output.headers["Content-type"] = "text/csv"
    return output
# viewLabelBorderWidth = graph['viewLabelBorderWidth']
# viewLabelColor = graph['viewLabelColor']
# viewLabelPosition = graph['viewLabelPosition']
# viewLayout = graph['viewLayout']
# viewMetric = graph['viewMetric']
# viewRotation = graph['viewRotation']
# viewSelection = graph['viewSelection']
# viewShape = graph['viewShape']
# viewSize = graph['viewSize']
# viewSrcAnchorShape = graph['viewSrcAnchorShape']
# viewSrcAnchorSize = graph['viewSrcAnchorSize']
# viewTexture = graph['viewTexture']
# viewTgtAnchorShape = graph['viewTgtAnchorShape']
# viewTgtAnchorSize = graph['viewTgtAnchorSize']

g = tlp.loadGraph('marvel.tlpb')

picon = g.getStringProperty("viewIcon")
plabel = g.getStringProperty("viewLabel")

name = 'Marvel Super Heroes (1990) #2'
node = g['viewLabel'].getNodesEqualTo(name).next()
g['viewSelection'].setAllNodeValue(False)
heroes_dict = {}
debug = 1
for n in g.getNodes():
    debug += 1
    if debug == 20000:
        print("DEBUG")
    if picon[n] == 'md-book-open':
        voisins = list(tlp.Graph.getInOutNodes(g, n))
def getLouvain():
    # load the graph data
    g = tlp.loadGraph('heroes.tlpb')
    plabel = g.getStringProperty("viewLabel")
    metricprop = g.getDoubleProperty("viewMetric")
    layout = g.getLayoutProperty("viewLayout")
    # Clean the data:
    # keep only the nodes with a sufficiently high degree;
    # nodes with too small a degree are not really part of the social network

    g = nettoyage(g, 300)

    # This filtering is somewhat artificial: it takes us from 38 communities without filtering
    # down to 8 with it. It is impossible to clearly represent 38 communities with colours.

    # Make the algorithm take edge weights into account when building the communities
    params = tlp.getDefaultPluginParameters('Louvain', g)
    poids = g.getIntegerProperty("poids")
    params['metric'] = poids

    g.applyDoubleAlgorithm('Louvain', params)

    # The Louvain algorithm has now been applied to the graph; viewMetric holds the community number.
    # Careful: the number of communities must not exceed 12
    color_scale = echelle_couleur()

    params2 = tlp.getDefaultPluginParameters('Color Mapping', g)
    params2["color scale"] = color_scale
    color = g.getColorProperty("viewColor")

    g.applyColorAlgorithm('Color Mapping', params2)
    # Each community now has a different colour

    # Apply the FM^3 (OGDF) algorithm to lay out the graph with the communities grouped together
    g.applyLayoutAlgorithm('FM^3 (OGDF)')

    # Get the node and edge dictionaries and draw the graph
    dico_Nodes = graphe_heros_sans_comics(g)

    # List of dictionaries; we need it to draw the graph with D3
    Nodes = []
    # Nodes_id is a dictionary: keys are the nodes, values are the ids we assign to them.
    # Since Nodes is a list, a dictionary makes it easier to look up the id of a stored node.
    Nodes_id = {}
    id = 0
    for n in dico_Nodes.keys():
        Nodes_id[n] = id
        Nodes.append({
            "id": id,
            "name": plabel[n],
            "size": 5,
            "color": list(color[n]),
            "communauté": metricprop[n],
            "x": layout[n][0],
            "y": layout[n][1]
        })
        id = id + 1

    Nodes = coloration(Nodes)

    for n in Nodes:
        n["size"] = 50

    Links = liens(dico_Nodes, Nodes_id)

    print("le nombre de communautés est : ", params["#communities"])
    print("la maudularité vaut : ", params["modularity"])

    # on renvoie le json
    dico = {}
    dico["nodes"] = Nodes
    dico["links"] = Links
    resp = json.dumps(dico)  # resp = json.dumps(nodes) + json.dumps(links)
    output = make_response(resp)
    output.headers["Content-Disposition"] = "attachment; filename=louvain.json"
    output.headers["Content-type"] = "json"
    return output
from tulip import tlp
from tulipgui import tlpgui

graph = tlp.loadGraph('save/airport.tlpbz')

for n in graph.nodes():
    graph['viewMetric'][n] = float(graph['Timezone'][n])

ds = tlp.getDefaultPluginParameters('Color Mapping')
ds['color scale'] = {
    0: (255, 25, 28, 200),
    0.33: (253, 174, 97, 200),
    0.66: (171, 221, 164, 200),
    1: (43, 131, 186, 200)
}
ds['input property'] = graph['viewMetric']

graph.applyColorAlgorithm('Color Mapping', ds)
graph['viewColor'].setAllEdgeValue((200, 200, 200, 200))
nodeLinkView = tlpgui.createView("Geographic view", graph, {}, True)
def convert(input_path, leaf_only=False, bounding_shape='convex_hull'):
    graph = tlp.loadGraph(input_path)

    # Retrieve nodes and edges from graph and construct Shapely geometries
    view_layout = graph.getLayoutProperty('viewLayout')
    view_size = graph.getSizeProperty('viewSize')

    leaf_nodes = [
        {
            'id': n.id,
            'parent_metanode': None,  # filled in later
            'geometry': Point(view_layout[n].x(), view_layout[n].y()).buffer(
                view_size[n][0] / 2.0, cap_style=CAP_STYLE.round),
            'diameter': view_size[n][0]
        } for n in graph.nodes()
    ]

    node_mapping = {}
    for n in leaf_nodes:
        node_mapping[n['id']] = n

    edges = []
    check_dup = {}
    for e in graph.getEdges():
        src, tgt = graph.ends(e)
        if src.id > tgt.id:
            src, tgt = tgt, src
        edge_id = '{}-{}'.format(src.id, tgt.id)

        # remove duplicate and self-connecting edges
        if edge_id not in check_dup and src.id != tgt.id:
            edges.append({
                'id': e.id,
                'ends': (src.id, tgt.id),
                'geometry': chop_segment(node_mapping[src.id]['geometry'],
                                         node_mapping[tgt.id]['geometry'])
            })
            check_dup[edge_id] = True

    bbox = tlp.computeBoundingBox(graph)
    root = graph.getId()
    height = {'a': 1}
    metanodes = {}

    # Construct a simple node (graph) hierarchy data structure from the tulip graph and count levels
    # This is the same level counting method in the Bourqui multi-level force layout paper.

    def dfs(g, cur_height):
        node = {
            'id': g.getId(),
            'geometry': None,
            'diameter': 0,
            'desc_metanodes': {},
            'parent_metanode': None,
            'leaf_nodes': {},
            'level': cur_height
        }

        # For finding whether a node is in a subgraph
        for leaf in g.getNodes():
            node['leaf_nodes'][leaf.id] = True
        # For finding whether two meta-nodes are on the same path of the node hierarchy
        for s in g.getDescendantGraphs():
            node['desc_metanodes'][s.getId()] = True

        height['a'] = max(height['a'], cur_height + 1)
        for s in g.getSubGraphs():
            dfs(s, cur_height + 1)
            metanodes[s.getId()]['parent_metanode'] = g.getId()

        # Add the field parent_metanode to the leaf node
        for leaf in g.getNodes():
            tmp = leaf_nodes[leaf.id]
            if tmp['parent_metanode'] is None:
                tmp['parent_metanode'] = g.getId()

        # Compute convex hull of this sub-graph in post-order
        if bounding_shape == 'convex_hull':
            if g.numberOfSubGraphs() == 0:
                # compute a convex hull of its leaf nodes
                coords = tlp.computeConvexHull(g)
                node['geometry'] = Polygon([(c.x(), c.y()) for c in coords])
            else:
                # union the convex hull of its sub-graphs
                node['geometry'] = MultiPolygon([
                    metanodes[s.getId()]['geometry'] for s in g.getSubGraphs()
                ]).convex_hull

            # Note that we don't compute the real diameter for a polygon, but instead, only use the diagonal of
            # the axis aligned bounding box to approximate the diameter, which is cheap to compute
            bbox = node['geometry'].bounds
            node['diameter'] = Point(bbox[0],
                                     bbox[1]).distance(Point(bbox[2], bbox[3]))
        elif bounding_shape == 'circle':
            center, fur = tlp.computeBoundingRadius(g)
            radius = center.dist(fur)
            node['geometry'] = Point(center.x(), center.y()).buffer(
                radius, cap_style=CAP_STYLE.round)
            node['diameter'] = 2 * radius
        else:
            bbox = tlp.computeBoundingBox(g)
            node['geometry'] = Polygon([(c.x(), c.y()) for c in bbox])
            node['diameter'] = bbox[0].dist(bbox[1])

        metanodes[g.getId()] = node

    if not leaf_only:
        dfs(graph, root)

    # Output json file at the same directory with same filename but "json" extension
    output_path = re.sub(r'\.tlp$', '.json', input_path)

    # Use the mapping function from shapely to serialize the geometry objects
    for n in leaf_nodes:
        n['geometry'] = mapping(n['geometry'])
    for e in edges:
        e['geometry'] = mapping(e['geometry'])
    for _, n in metanodes.items():
        n['geometry'] = mapping(n['geometry'])

    json_data = {
        'leaf_nodes': leaf_nodes,
        'edges': edges,
        'height': height['a'],
        'root': root,
        'metanodes': metanodes,
        'bounding_box': [[bbox[0].x(), bbox[0].y()],
                         [bbox[1].x(), bbox[1].y()]]
    }
    json.dump(json_data, open(output_path, 'w'))
    print('Converted to ', output_path, ' #nodes:', len(leaf_nodes),
          ' #edges: ', len(edges), ' height: ', height['a'])
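
# A usage sketch for convert() above, assuming a clustered Tulip file "clusters.tlp"
# is available (the path is hypothetical): it writes "clusters.json" next to the
# input file and prints a short summary.
if __name__ == "__main__":
    convert("clusters.tlp", leaf_only=False, bounding_shape="convex_hull")
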
    def setUp(self):
        self.g1 = tlp.loadGraph("tulip_wrapper/tests/g1.tlp")
        self.g1_pruned = tlp.loadGraph("tulip_wrapper/tests/g1_pruned.tlp")
        self.sas_30 = tlp.loadGraph("tulip_wrapper/tests/sas_30.tlp")
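
# The setUp() above is a unittest fixture excerpted from its test case. A hedged
# sketch of an enclosing class (the class and test names are assumptions for
# illustration): tlp.loadGraph returns None when a file cannot be read, which is
# an easy first thing to assert on.
import unittest

from tulip import tlp


class GraphFixtureTest(unittest.TestCase):
    def setUp(self):
        self.g1 = tlp.loadGraph("tulip_wrapper/tests/g1.tlp")

    def test_fixture_loaded(self):
        self.assertIsNotNone(self.g1)


if __name__ == "__main__":
    unittest.main()
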
def loadGraph(request):
    # load the requested network, optionally prune/bundle it, and return it as JSON
    if (request.method == "GET"
            and request.GET.get("network_name", None) is not None):
        nameOfGraph = request.GET.get("network_name", None)

        toPrune = request.GET.get("toPrune", False)
        toPrune = toPrune == "true"

        toCliqueBundle = request.GET.get("toCliqueBundle", False)
        toCliqueBundle = toCliqueBundle == "true"

        toEdgeBundle = request.GET.get("toEdgeBundle", False)
        toEdgeBundle = toEdgeBundle == "true"

        networkFromDB = Network.objects.filter(network_name=nameOfGraph)[0]
        #print (networkFromDB.network_file.name)

        beforeTlpLoaded = time.time()
        currentNetwork = tlp.loadGraph(
            os.path.join(settings.MEDIA_ROOT, networkFromDB.network_file.name))
        afterTlpLoaded = time.time()

        tlpLoadTime = round(((afterTlpLoaded - beforeTlpLoaded) * 1000.0), 1)

        if currentNetwork is None:
            return JsonResponse({
                "success": False,
                "message": "File failed to load"
            })
        else:
            pruningTime = False
            cliqueBundlingTime = False
            edgeBundlingTime = False
            nodesBeenPruned = []
            numDeletedClique = []
            numDeletedEdge = []

            if (toPrune):
                beforePruned = time.time()
                currentNetwork, nodesBeenPruned = NetworkOptimiser.nodePruning(
                    currentNetwork)
                afterPruned = time.time()
                pruningTime = round(((afterPruned - beforePruned) * 1000.0), 1)

            if (toCliqueBundle):
                beforeCliqueBundled = time.time()
                currentNetwork, numDeletedClique = NetworkOptimiser.cliqueBasedNodeBundling(
                    currentNetwork)
                afterCliqueBundled = time.time()
                cliqueBundlingTime = round(
                    ((afterCliqueBundled - beforeCliqueBundled) * 1000.0), 1)

            if (toEdgeBundle):
                beforeEdgeBundled = time.time()
                currentNetwork, numDeletedEdge = NetworkOptimiser.edgeBasedNodeBundling(
                    currentNetwork)
                afterEdgeBundled = time.time()
                edgeBundlingTime = round(
                    ((afterEdgeBundled - beforeEdgeBundled) * 1000.0), 1)

            graphInJson = TlpJsonConverter.tlp_to_json(nameOfGraph,
                                                       currentNetwork,
                                                       nodesBeenPruned,
                                                       numDeletedClique,
                                                       numDeletedEdge)

            return JsonResponse({
                "success": True,
                "data": graphInJson,
                "tlpLoadTime": tlpLoadTime,
                "pruningTime": pruningTime,
                "cliqueBundlingTime": cliqueBundlingTime,
                "edgeBundlingTime": edgeBundlingTime
            })
    return JsonResponse({
        "success": False,
        "message": "Define name and use GET"
    })
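
# A hedged routing sketch for the Django view above (assumed to live in a module
# named views.py of an installed app); the URL pattern and route name are
# illustrative assumptions, not taken from the original project.
from django.urls import path

from . import views  # hypothetical module containing loadGraph

urlpatterns = [
    path("network/load/", views.loadGraph, name="load-graph"),
]
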
    def process(self, project_id: str, filename: str, meta: OveAssetMeta,
                options: Dict):
        logging.info("Copying %s/%s/%s into the temp place ...", project_id,
                     meta.id, filename)

        with TemporaryDirectory() as input_folder:
            with TemporaryDirectory() as output_folder:
                os.mkdir(os.path.join(
                    input_folder,
                    os.path.split(filename)
                    [0]))  # make subdirectory for asset version number

                network_file = os.path.join(input_folder, filename)
                open(network_file, 'a').close()

                self._file_controller.download_asset(
                    project_id=project_id,
                    asset_id=meta.id,
                    filename=filename,
                    down_filename=network_file)

                algorithm = options.get('algorithm', "FM^3 (OGDF)")

                params = tlp.getDefaultPluginParameters(algorithm)
                for param in params:
                    params[param] = self.convert_param(
                        options.get(algorithm + '_' + param, params[param]))

                logging.info("Received options %s ...", options)
                logging.info(
                    "Performing layout using algorithm %s and options %s ...",
                    algorithm, params)

                graph = tlp.loadGraph(network_file)
                graph.applyLayoutAlgorithm(algorithm, params)

                result_name = options.get('result_name')
                if not result_name:
                    result_name = meta.id.split('.')[0] + '.gml'
                tlp.saveGraph(graph, os.path.join(output_folder, result_name))

                with open(
                        os.path.join(output_folder, result_name + ".options"),
                        'w') as fp:
                    json.dump(options, fp)

                self._file_controller.upload_asset_folder(
                    project_id=project_id,
                    meta=meta,
                    upload_folder=output_folder,
                    worker_name=self.name)

        base_name = os.path.splitext(os.path.basename(filename))[0]
        meta.index_file = os.path.join(meta.worker_root + self.name, base_name,
                                       result_name)

        self._file_controller.set_asset_meta(project_id=project_id,
                                             asset_id=meta.id,
                                             meta=meta)
        logging.info("Finished generating %s/%s into the storage ...",
                     project_id, meta.id)
def films(acteur="Jean Reno"):

    # load graph data
    mainGraph = tlp.loadGraph(FRCinemaURL)
    name = mainGraph.getStringProperty("name")
    viewSelection = mainGraph.getBooleanProperty("viewSelection")
    viewSelection.setAllNodeValue(False)
    viewMetric = mainGraph.getDoubleProperty("viewMetric")
    mainGraph.applyDoubleAlgorithm("Degree", viewMetric)

    names = []

    for n in mainGraph.getNodes():
        if name[n] != "" and viewMetric[n] > 1:
            names.append(name[n])

    names = np.sort(names)

    # Subgraph corresponding to the actor in parameter
    i = 0
    for n in name.getNodesEqualTo(acteur):
        i = i + 1
        viewSelection[n] = True
    if (i == 0):
        return 'There was a problem generating a graph for actor ' + acteur

    params = tlp.getDefaultPluginParameters('Reachable SubGraph', mainGraph)

    params['edge direction'] = "all edges"
    params['distance'] = 2

    mainGraph.applyBooleanAlgorithm('Reachable SubGraph', params)
    mainGraph.addSubGraph(viewSelection, name="acteurd2")
    g = mainGraph.getSubGraph("acteurd2")

    # specific film data
    actorID = g.getIntegerProperty("actorID")
    name = g.getStringProperty("name")
    #budget = g.getIntegerProperty("budget")
    #filmID = g.getIntegerProperty("filmID")
    #original_language = g.getStringProperty("original_language")
    original_title = g.getStringProperty("original_title")
    #popularity = g.getDoubleProperty("popularity")
    release_date = g.getStringProperty("release_date")
    #revenue = g.getDoubleProperty("revenue")
    #runtime = g.getIntegerProperty("runtime")
    #vote_average = g.getDoubleProperty("vote_average")
    #vote_count = g.getIntegerProperty("vote_count")

    # useful tulip data
    #viewColor = g.getColorProperty("viewColor")
    #viewIcon = g.getStringProperty("viewIcon")
    #viewLabel = g.getStringProperty("viewLabel")
    viewMetric = g.getDoubleProperty("viewMetric")
    viewSelection = g.getBooleanProperty("viewSelection")

    # Creates a D3-readable graph
    g.applyDoubleAlgorithm("Degree", viewMetric)

    nodes = []
    links = []

    for n in g.getNodes():
        node = {}
        node["id"] = original_title[n] if (actorID[n] == 0) else name[n]
        node["degree"] = viewMetric[n]
        node["name"] = name[n]
        node["original_title"] = original_title[n]
        node['date'] = release_date[n]
        nodes.append(node)

    for e in g.getEdges():
        link = {}
        link["source"] = original_title[g.source(e)] if (
            actorID[g.source(e)] == 0) else name[g.source(e)]
        link["target"] = original_title[g.target(e)] if (
            actorID[g.target(e)] == 0) else name[g.target(e)]
        links.append(link)

    graph = {}
    graph['links'] = links
    graph['nodes'] = nodes

    return render_template("acteur_films.html",
                           title="Les Films de " + acteur,
                           acteur=acteur,
                           graph=graph,
                           nodelink=True,
                           names=names)