def __init__(self, version_label, my_node, conf_path, routing_m_mod,
             lookup_m_mod, experimental_m_mod, private_dht_name,
             bootstrap_mode):
    """Build the DHT core.

    Sets up the overlay bootstrapper, message factory, querier, routing,
    responder, lookup and experimental managers, and initializes the
    main-loop scheduling timestamps.

    Args:
        version_label: version string embedded in this node's messages.
        my_node: local node descriptor; its id, when set, overrides the
            randomly generated one.
        conf_path: path handed to bootstrap.OverlayBootstrapper.
        routing_m_mod / lookup_m_mod / experimental_m_mod: pluggable
            modules providing RoutingManager / LookupManager /
            ExperimentalManager.
        private_dht_name: name of the private DHT (passed to MsgFactory).
        bootstrap_mode: forwarded to the Responder.
    """
    self.bootstrapper = bootstrap.OverlayBootstrapper(conf_path)
    my_addr = my_node.addr
    self._my_id = my_node.id  # id indicated by user
    if not self._my_id:
        # Fix: the original had a doubled assignment
        # (`self._my_id = self._my_id = identifier.RandomId()`);
        # a single assignment suffices.
        self._my_id = identifier.RandomId()  # random id
    self._my_node = Node(my_addr, self._my_id, version=version_label)
    self.msg_f = message.MsgFactory(version_label, self._my_id,
                                    private_dht_name)
    self._querier = Querier()
    self._routing_m = routing_m_mod.RoutingManager(self._my_node,
                                                   self.msg_f,
                                                   self.bootstrapper)
    self._responder = responder.Responder(self._my_id, self._routing_m,
                                          self.msg_f, bootstrap_mode)
    # The responder owns the tracker; keep a direct reference to it.
    self._tracker = self._responder._tracker
    self._lookup_m = lookup_m_mod.LookupManager(self._my_id, self.msg_f,
                                                self.bootstrapper)
    self._experimental_m = experimental_m_mod.ExperimentalManager(
        self._my_node.id, self.msg_f)
    current_ts = time.time()
    # All periodic tasks are due immediately on startup.
    self._next_maintenance_ts = current_ts
    self._next_timeout_ts = current_ts
    self._next_main_loop_call_ts = current_ts
    self._cached_lookups = []
def __init__(self, dht_addr):
    """Set up a DHT node bound to *dht_addr* and start bootstrapping.

    *dht_addr* is indexed as a pair; its second element is used as the
    port for the RPC manager (presumably (host, port) — confirm with
    callers).
    """
    self.my_addr = dht_addr
    self.my_id = identifier.RandomId()  # fresh random node identifier
    self.my_node = Node(self.my_addr, self.my_id)
    self.tracker = tracker.Tracker()
    self.token_m = token_manager.TokenManager()
    self.reactor = ThreadedReactor()
    # RPC layer listens on the port taken from the address pair.
    self.rpc_m = RPCManager(self.reactor, self.my_addr[1])
    self.querier = Querier(self.rpc_m, self.my_id)
    # NOTE(review): `bootstrap_nodes` is not defined in this method nor
    # taken as a parameter — presumably a module-level constant; verify
    # it exists in this module's scope.
    self.routing_m = RoutingManager(self.my_node, self.querier,
                                    bootstrap_nodes)
    self.responder = Responder(self.my_id, self.routing_m,
                               self.tracker, self.token_m)
    # Wire all routing-table maintenance callbacks BEFORE calling
    # do_bootstrap() so no early message goes unhandled.
    self.responder.set_on_query_received_callback(
        self.routing_m.on_query_received)
    self.querier.set_on_response_received_callback(
        self.routing_m.on_response_received)
    self.querier.set_on_error_received_callback(
        self.routing_m.on_error_received)
    self.querier.set_on_timeout_callback(self.routing_m.on_timeout)
    self.querier.set_on_nodes_found_callback(self.routing_m.on_nodes_found)
    self.routing_m.do_bootstrap()
    # Route incoming query messages to the responder.
    self.rpc_m.add_msg_callback(QUERY, self.responder.on_query_received)
    self.lookup_m = LookupManager(self.my_id, self.querier, self.routing_m)
def __init__(self, dht_addr, state_path, routing_m_mod, lookup_m_mod):
    """Set up the DHT node, restoring any saved state from *state_path*.

    routing_m_mod / lookup_m_mod are pluggable modules providing
    RoutingManager / LookupManager implementations.
    """
    self.state_filename = os.path.join(state_path, STATE_FILENAME)
    # load_state() is expected to populate self._my_id and
    # self.loaded_nodes from the state file (or leave them falsy).
    self.load_state()
    if not self._my_id:
        self._my_id = identifier.RandomId()  # no saved id: pick a random one
    self._my_node = Node(dht_addr, self._my_id)
    self._tracker = tracker.Tracker()
    self._token_m = token_manager.TokenManager()
    self._reactor = ThreadedReactor()
    # Listen on the UDP port taken from the node's address pair.
    self._reactor.listen_udp(self._my_node.addr[1],
                             self._on_datagram_received)
    #self._rpc_m = RPCManager(self._reactor)
    self._querier = Querier(self._my_id)
    # Prefer nodes remembered from a previous run; fall back to the
    # well-known bootstrap nodes. Drop the loaded list once consumed.
    bootstrap_nodes = self.loaded_nodes or BOOTSTRAP_NODES
    del self.loaded_nodes
    self._routing_m = routing_m_mod.RoutingManager(self._my_node,
                                                   bootstrap_nodes)
    # self._responder = Responder(self._my_id, self._routing_m,
    #                             self._tracker, self._token_m)
    self._lookup_m = lookup_m_mod.LookupManager(self._my_id)
    current_time = time.time()
    # Maintenance is due immediately; state saving after a fixed delay.
    self._next_maintenance_ts = current_time
    self._next_save_state_ts = current_time + SAVE_STATE_DELAY
    self._running = False  # main loop not started yet
def test_querier(self):
    """Fe2O3 must have at least 12 entries in the Materials Project.

    A lower bound (>=) is used rather than equality so the test keeps
    passing if more entries are added in the future.
    """
    entry_count = len(Querier(API_KEY, 'Fe2O3').mp_all)
    self.assertTrue(entry_count >= 12,
                    msg='Number of entries for Fe2O3 is not correct')
def __init__(self, dht_addr):
    """Wire together a DHT node listening on *dht_addr* and bootstrap it.

    Only the reactor, the lookup manager and the routing manager are
    kept as attributes; everything else lives in the callback graph.
    """
    node_id = identifier.RandomId()
    local_node = Node(dht_addr, node_id)
    peer_tracker = tracker.Tracker()
    tokens = token_manager.TokenManager()
    self.reactor = ThreadedReactor()
    rpc = RPCManager(self.reactor, dht_addr[1])
    query_sender = Querier(rpc, node_id)
    routing = RoutingManager(local_node, query_sender, bootstrap_nodes)
    query_responder = Responder(node_id, routing, peer_tracker, tokens)
    # Hook every routing-maintenance callback up before bootstrapping.
    query_responder.set_on_query_received_callback(
        routing.on_query_received)
    query_sender.set_on_response_received_callback(
        routing.on_response_received)
    query_sender.set_on_error_received_callback(routing.on_error_received)
    query_sender.set_on_timeout_callback(routing.on_timeout)
    query_sender.set_on_nodes_found_callback(routing.on_nodes_found)
    routing.do_bootstrap()
    rpc.add_msg_callback(QUERY, query_responder.on_query_received)
    self.lookup_m = LookupManager(node_id, query_sender, routing)
    self._routing_m = routing
def getid(self):
    """Request the modem's id record by sending a GetIMInfo message."""
    q = Querier(InsteonAddress("00.00.00"))
    q.setMsgHandler(IMInfoMsgHandler("getid"))
    self.querier = q
    q.sendMsg(Msg.s_makeMessage("GetIMInfo"))
def getIMConfig(self):
    """Request the modem's configuration flags byte (GetIMConfig)."""
    q = Querier(InsteonAddress("00.00.00"))
    q.setMsgHandler(IMConfigMsgHandler("getIMConfig"))
    self.querier = q
    q.sendMsg(Msg.s_makeMessage("GetIMConfig"))
def do_query(self, query):
    """Run *query* against the SPARQL endpoint and render the results page.

    Best-effort: any failure while querying or unpacking renders an
    error page instead of propagating.
    """
    q = Querier()
    results = None
    try:
        # Extract only the non-empty bindings from each result dict.
        bindings = [r['results']['bindings']
                    for r in q.query(str(query))
                    if r['results']['bindings'] != []]
        results = self.unpack_results(bindings)
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Keep the deliberate best-effort behavior of
        # rendering an error page on any query/unpack failure.
        results = "ERROR"
    return self.serve_template('results.txt', results=results)
def __init__(self, version_label, my_node, state_filename, routing_m_mod,
             lookup_m_mod, experimental_m_mod, private_dht_name,
             bootstrap_mode):
    """Build the DHT core, restoring node id and bootstrap nodes from
    the saved state file when available.

    Id precedence: id given by the user on my_node, then the id loaded
    from *state_filename*, then a freshly generated random id.
    """
    if size_estimation:
        self._size_estimation_file = open('size_estimation.dat', 'w')
    self.state_filename = state_filename
    saved_id, saved_bootstrap_nodes = state.load(self.state_filename)
    my_addr = my_node.addr
    self._my_id = my_node.id  # id indicated by user
    if not self._my_id:
        self._my_id = saved_id  # id loaded from file
    if not self._my_id:
        # Fix: the original had a doubled assignment
        # (`self._my_id = self._my_id = identifier.RandomId()`);
        # a single assignment suffices.
        self._my_id = identifier.RandomId()  # random id
    self._my_node = Node(my_addr, self._my_id, version=version_label)
    self.msg_f = message.MsgFactory(version_label, self._my_id,
                                    private_dht_name)
    self._querier = Querier()
    self._routing_m = routing_m_mod.RoutingManager(
        self._my_node, saved_bootstrap_nodes, self.msg_f)
    self._responder = responder.Responder(self._my_id, self._routing_m,
                                          self.msg_f, bootstrap_mode)
    # The responder owns the tracker; keep a direct reference to it.
    self._tracker = self._responder._tracker
    self._lookup_m = lookup_m_mod.LookupManager(self._my_id, self.msg_f)
    self._experimental_m = experimental_m_mod.ExperimentalManager(
        self._my_node.id, self.msg_f)
    current_ts = time.time()
    # State saving is deferred; all other periodic tasks are due now.
    self._next_save_state_ts = current_ts + SAVE_STATE_DELAY
    self._next_maintenance_ts = current_ts
    self._next_timeout_ts = current_ts
    self._next_main_loop_call_ts = current_ts
    self._pending_lookups = []
    self._cached_lookups = {}
from dash.dependencies import Input, Output # Neo4j Setup from py2neo import Graph import json # Import class to make queries to Neo4j from querier import Querier # Dash Setup external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css'] app = dash.Dash(__name__, external_stylesheets=external_stylesheets) server = app.server # Connect to Neo4j querier = Querier() # This function returns all components for refresh def serve_layout(): return html.Div(children=[dark_theme()], style={'padding': '0px'}) # Set the website refresh to call a function app.layout = serve_layout # Colors for dark theme theme = { 'dark': True, 'detail': '#149414', 'primary': '#00EA64',
def setup(self):
    # Put the time module into mock mode so the test controls the clock
    # deterministically.
    time.mock_mode()
    self.querier = Querier()  # tc.CLIENT_ID)  (constructor arg left out)
""" This program queries the Materials Project database with specified materials using user's API key. """ __author__ = "Erpan Arkin" __email__ = "*****@*****.**" description = 'Materials Project Querying Tool' parser = ArgumentParser(description=description) parser.add_argument('API_KEY', metavar='API_KEY', type=str, help="User's API Key to connect to the MP") parser.add_argument('MATERIAL', metavar='MATERIAL', type=str, help='Chemical formula of the target material, e.g. Fe2O3') parser.add_argument('-p', action='store_true', help="plot all entries' id verse energy per atom") args = parser.parse_args() my_query = Querier(args.API_KEY, args.MATERIAL) my_query.GS_finder() my_query.print_results() if args.p: my_query.plot()
from csv import DictReader from rbm import RBM, np from querier import Querier from propertyFinder import PropertyFinder TRAINING_SAMPLE_SIZE = 100 localQuerier = Querier('http://127.0.0.1:9999/bigdata/sparql') localPropertyFinder = PropertyFinder(localQuerier) effectsList = [{'effect': "", 'disease': ""}] csvFile = open("result.csv") trainingSet = DictReader(csvFile) trainingRows = [] trainingData = [[0 for i in range(len(effectsList))] for j in range(TRAINING_SAMPLE_SIZE)] index1=0 for row in trainingSet: if index1 < TRAINING_SAMPLE_SIZE : geneProperties = DictReader(localPropertyFinder.findGeneProperties(row['gene'])) for prop in geneProperties: for index2, item in effectsList: if prop == item: trainingData[index1][index2] = 1 drugProperties = DictReader(localPropertyFinder.findDrugProperties(row['drug'])) for prop in drugProperties: for index2, item in effectsList: if prop == item:
def __init__(self, name, addr):
    """Create a device with the given name and address.

    Instantiates the device's DB and a Querier bound to *addr*, then
    hands the device to addDev() (presumably a module-level device
    registry — confirm).
    """
    self.name = name
    self.address = addr
    self.db = DB()
    self.querier = Querier(addr)
    addDev(self)
def setup(self):
    global time
    # Replace the time module BOTH in this module and in the querier
    # module with a mock, so the test controls the clock everywhere.
    time = querier.time = MockTime()
    self.querier = Querier(tc.CLIENT_ID)
def startWatch(self):
    """Dump every incoming modem message to the terminal for 10 s."""
    watcher = Querier(InsteonAddress("00.00.00"))
    watcher.setMsgHandler(MsgDumper("modem"))
    self.querier = watcher
    watcher.startWait(10000)
def modifyRecord(self, addr, group, controlCode, recordFlags, data, txt):
    """Build and send a link-database modification message.

    The message is assembled first (so construction errors surface
    before any querier is created), then dispatched with a default
    handler labeled *txt*.
    """
    mod_msg = self.__makeModMsg(addr, group, controlCode, recordFlags,
                                data, txt)
    q = Querier(self.address)
    q.setMsgHandler(DefaultMsgHandler(txt))
    self.querier = q
    q.sendMsg(mod_msg)
?clinvarVariant clinvarv:Variant_Phenotype ?disease ?clinvarVariant ?effect ?disease } """ # Vide a priori query7 = """ SELECT DISTINCT ?effect ?disease WHERE { ?clinvarVariant clinvar:x-medgen ?disease ?clinvarVariant ?effect ?disease } """ localQuerier = Querier(url) # resultQuery1 = localQuerier.query(prefix+query1) # csvFile1 = open("csvFile1.csv", 'w') # csvFile1.write(resultQuery1) # print "[Running Time] %s sec" % (time.time() - start_time) # resultQuery2 = localQuerier.query(prefix+query2) # csvFile2 = open("csvFile2.csv", 'w') # csvFile2.write(resultQuery2) # print "[Running Time] %s sec" % (time.time() - start_time) # resultQuery3 = localQuerier.query(prefix+query3) # csvFile3 = open("csvFile3.csv", 'w') # csvFile3.write(resultQuery3) # print "[Running Time] %s sec" % (time.time() - start_time)