def main():
    """Load (fqdn, asn, ipaddress) rows from getData() into the graph.

    Creates one vertex per row whose first field is non-empty.  The
    original re-instantiated Graph() three times — once per loop
    iteration — which opened a fresh server connection for every row;
    a single shared connection is sufficient.
    """
    g = Graph()
    # Kept from the original: a "related" edge between hard-coded
    # vertex ids 7 and 8, created before the bulk load.
    g.edges.create(7, "related", 8)
    keys = ["fqdn", "asn", "ipaddress"]
    try:
        for prod_list in getData():
            # Skip rows whose first field (fqdn) is empty.
            if len(prod_list[0]) != 0:
                # Explicit indexing (not slicing) so a short row still
                # raises IndexError, as in the original.
                values = [prod_list[0], prod_list[1], prod_list[2]]
                g.vertices.create(dict(zip(keys, values)))
    except IndexError:
        # Bare re-raise preserves the traceback and message; the
        # original `raise IndexError` replaced it with an empty one.
        raise
def initialize():
    """Connect to the default Neo4j server and register the model proxies.

    NOTE: the Neo4j server must already be running (``neo4j start``)
    before calling this.  Returns the configured Graph instance.
    """
    graph = Graph()  # uses the default server address
    for proxy_name, model in (("article", Article), ("referral", Referral)):
        graph.add_proxy(proxy_name, model)
    return graph
def __init__(self):
    """Open a Neo4j Graph connection from the 'default' database settings."""
    # Hoist the repeated settings lookup; same keys, same values.
    db = settings.NEO4J_DATABASES['default']
    db_url = 'http://{}:{}{}'.format(db['HOST'], db['PORT'], db['ENDPOINT'])
    self.g = Graph(Config(db_url, db['USER'], db['PASSWORD']))
# Module header: stdlib plus bulbs (Neo4j ORM) and project-local imports.
import sys, re, datetime, neo4j
from bulbs.model import Node, Relationship
from bulbs.property import String, Integer, DateTime
from bulbs.utils import get_logger
from bulbs.utils import current_datetime
from bulbs.neo4jserver import Graph
from bulbs.neo4jserver.batch import Neo4jBatchClient
from bizold.input.file import StringReader, StringWriter

# Module-level logger and a shared default Graph connection.
# NOTE(review): Graph() connects to the Neo4j server at import time —
# importing this module with no server running will fail; confirm intended.
log = get_logger(__name__)
graph = Graph()


class BizBase(object):
    # Common base for the domain models in this module: a `new` factory
    # plus a readable repr built from the model's declared properties.

    @classmethod
    def new(cls, graph=graph, **kwargs):
        """Create an element of this type through its graph proxy.

        Looks up the proxy registered under ``cls.element_type`` on
        *graph* (defaults to the module-level connection) and forwards
        **kwargs to its ``create``.
        """
        proxy = getattr(graph, cls.element_type)
        return proxy.create(**kwargs)

    def __repr__(self):
        # Render "<ClassName-eid: prop: value, ...>" from the model's
        # initial properties (bulbs internal API _get_initial_properties).
        result = []
        for prop in self.__class__._get_initial_properties().values():
            result.append([prop.name, getattr(self, prop.name)])
        return '<{}-{}: {}>'.format(
            self.__class__.__name__, self.eid,
            ', '.join(['{}: {}'.format(*t) for t in result]))

    __str__ = __repr__


class Singleton(BizBase):
    """Class for multiple inheritance. Together with a neo4j-Node ensures unique document:
# BFS web crawler (Python 2: urllib2/urlparse/Queue) storing pages via
# a bulbs Neo4j graph.
import sys
import urllib2
from urlparse import urljoin
from bs4 import BeautifulSoup
import Queue
from bulbs.neo4jserver import Graph
import time
from indexer import PageSoup
# import robotparser # used to check robot files

# Seed URLs and crawl parameters; DEFAULT_GRAPH connects to the default
# Neo4j server address at import time.
DEFAULT_URLS = ['http://www.google.com','http://www.amazon.com','http://www.nytimes.com','http://www.racialicious.com','http://www.groupon.com','http://www.yelp.com']
DEFAULT_DEPTH = 2
DEFAULT_GRAPH = Graph()


class BFS_Crawler:
    """Create an instance of Crawler with a root and its tree"""

    def __init__(self, graph=DEFAULT_GRAPH, start='http://www.google.com', depth=DEFAULT_DEPTH):
        """Initialize the crawler with the starting urls"""
        self.g = graph
        self.root = start
        self.depth = depth
        self.start = []
        # NOTE(review): Indexer and MongoClient are not imported in the
        # visible source — presumably imported elsewhere; verify.
        self.indexer = Indexer()
        self.connection = MongoClient()
        # NOTE(review): bare name `connection` is undefined here — this
        # looks like it should be `self.connection.page_db`; confirm.
        self.page_db = connection.page_db

    def process_page(self, url):
        """Retrieve all html data from a webpage,index it and return a list of links"""
        links = []
        try:
            # NOTE(review): `data` is not defined at this point in the
            # visible source; the method is truncated here.
            p = Page(data)
# NOTE(review): the `label` below belongs to a Relationship class whose
# `class` header lies outside this visible chunk (registered later as
# "has_treenode", i.e. HasTreenode).
label = "has_treenode"


class PresynapticTo(Relationship):
    label = "presynaptic_to"


class PostsynapticTo(Relationship):
    label = "postsynaptic_to"


####
# Test
####
# Connect to the default Neo4j server and register one proxy per
# node/relationship model so g.<name>.create(...) works below.
g = Graph()
g.add_proxy("neuron", Neuron)
g.add_proxy("treenode", Treenode)
g.add_proxy("connector", Connector)
g.add_proxy("has_child", HasChild)
g.add_proxy("has_treenode", HasTreenode)
g.add_proxy("presynaptic_to", PresynapticTo)
g.add_proxy("postsynaptic_to", PostsynapticTo)

# create a few objects
neuron1 = g.neuron.create(name="MyNeuron1")
neuron2 = g.neuron.create(name="MyNeuron2")
neuron3 = g.neuron.create(name="MyNeuron3")
treenode1 = g.treenode.create(x=3.3, y=4.3, z=3.2)
treenode11 = g.treenode.create(x=3.3, y=4.3, z=3.2)
from bulbs.neo4jserver import Graph, Config, NEO4J_URI
from texts.config import NEO4J_URI, NEO4J_USER, NEO4J_PASS
from texts.py import *

# Build the connection config explicitly: the original passed an
# undefined name `config` to Graph(), a NameError at import time.  The
# credentials imported above were evidently intended for this.
config = Config(NEO4J_URI, NEO4J_USER, NEO4J_PASS)
g = Graph(config)


def create_link(v1, v2, e1, verb):
    """Create two vertices named *v1* and *v2* and link them with a *verb* edge.

    Fixes in this revision: the original created vertices literally
    named 'v1'/'v2' (quoted strings, ignoring the parameters) and then
    discarded them, passing the raw arguments to edges.create.  The
    edge now connects the vertices that were just created.  *e1* is
    accepted for backward compatibility but unused.
    """
    src = g.vertices.create(name=v1)
    dst = g.vertices.create(name=v2)
    g.edges.create(src, verb, dst)
# NOTE(review): these properties belong to a bulbs Node model whose
# `class` header (registered below as "system", i.e. System) lies
# outside the visible chunk.  `id` shadows the builtin.
id = Integer()
name = String(nullable=False)
region = String(nullable=False)
security = Float()


class IsConnectedTo(Relationship):
    label = "is_connected_to"


# Open the local SQLite database; Row factory enables name-based
# column access in the queries below.
conn = sqlite3.connect(os.path.expanduser("~/eve.db"))
conn.row_factory = sqlite3.Row

# populate graph
from bulbs.neo4jserver import Graph, Config, NEO4J_URI
g = Graph(Config(NEO4J_URI, "neo4j", "key"))
g.add_proxy("system", System)
g.add_proxy("is_connected_to", IsConnectedTo)

# Index every existing vertex by its stored "id" property.
# NOTE(review): g.V[1::] skips the first vertex — presumably the Neo4j
# root/reference node; confirm this is intentional.
systems = {}
for item in g.V[1::]:
    systems[item.get("id")] = item


def id_to_name(i):
    """Return the solarSystemName for solarSystemID *i* from SQLite."""
    c = conn.cursor()
    c.execute("select solarSystemName from mapSolarSystems where solarSystemID=?", (i, ))
    return c.fetchone()[0]


def distance_between(f, t):
    # NOTE(review): Python 2 print statement; the function body is
    # truncated here in the visible source.
    print f, t
import unittest

from bulbs.config import Config
from bulbs.tests import BulbsTestCase, bulbs_test_suite
from bulbs.neo4jserver import Graph, Neo4jClient, NEO4J_URI, \
    VertexIndexProxy, EdgeIndexProxy, ExactIndex
from bulbs.tests import GremlinTestCase

# Wire the shared BulbsTestCase fixture to the Neo4j backend.
config = Config(NEO4J_URI)
BulbsTestCase.client = Neo4jClient(config)
BulbsTestCase.vertex_index_proxy = VertexIndexProxy
BulbsTestCase.edge_index_proxy = EdgeIndexProxy
BulbsTestCase.index_class = ExactIndex
BulbsTestCase.graph = Graph(config)


def test_suite():
    """Assemble the standard bulbs suite plus the Gremlin tests.

    Fix: the original imported a nonexistent module ``Test`` and called
    ``Test.makeSuite`` / ``Test.main`` — the ``makeSuite`` and
    ``main(defaultTest=...)`` calls (and the commented-out line below)
    are unittest's API.
    """
    suite = bulbs_test_suite()
    #suite.addTest(unittest.makeSuite(RestTestCase))
    suite.addTest(unittest.makeSuite(GremlinTestCase))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')