def tearDown(self):
    # flush the database via storage
    storage('redis', flush_db=True, host=redis_host, port=redis_port, db=redis_db)

def __init__(self):
    try:
        self.storage = storage(storage.ST_REDIS)
    except:
        if not g.debug:
            g.logger.error('Failed to create a connection to Redis: Use file-based storage instead.')
        self.storage = storage(storage.ST_FILE)

def opendb(self, db_name):
    self.file_obj = storage(db_name)
    if self.file_obj.fptr is None:
        print("db does not exist")
        return 0
    return 1

def add_users_to_database(self):
    config.main["mysc"]["save_friends_in_autocompletion_db"] = self.dialog.friends_buffer.GetValue()
    config.main["mysc"]["save_followers_in_autocompletion_db"] = self.dialog.followers_buffer.GetValue()
    output.speak(_(u"Updating database... You can close this window now. A message will tell you when the process finishes."))
    database = storage.storage()
    if self.dialog.followers_buffer.GetValue() == True:
        buffer = self.window.search_buffer("people", "followers")
        for i in buffer.db.settings[buffer.name_buffer]:
            database.set_user(i["screen_name"], i["name"], 1)
    else:
        database.remove_by_buffer(1)
    if self.dialog.friends_buffer.GetValue() == True:
        buffer = self.window.search_buffer("people", "friends")
        for i in buffer.db.settings[buffer.name_buffer]:
            database.set_user(i["screen_name"], i["name"], 2)
    else:
        database.remove_by_buffer(2)
    wx_settings.show_success_dialog()
    self.dialog.Destroy()

def add_users_to_database(self):
    self.config["mysc"]["save_friends_in_autocompletion_db"] = self.dialog.get("friends_buffer")
    self.config["mysc"]["save_followers_in_autocompletion_db"] = self.dialog.get("followers_buffer")
    output.speak(_(u"Updating database... You can close this window now. A message will tell you when the process finishes."))
    database = storage.storage(self.buffer.session.session_id)
    if self.dialog.get("followers_buffer") == True:
        buffer = self.window.search_buffer("followers", self.config["twitter"]["user_name"])
        for i in buffer.session.db[buffer.name]["items"]:
            database.set_user(i["screen_name"], i["name"], 1)
    else:
        database.remove_by_buffer(1)
    if self.dialog.get("friends_buffer") == True:
        buffer = self.window.search_buffer("friends", self.config["twitter"]["user_name"])
        for i in buffer.session.db[buffer.name]["items"]:
            database.set_user(i["screen_name"], i["name"], 2)
    else:
        database.remove_by_buffer(2)
    wx_settings.show_success_dialog()
    self.dialog.destroy()

def _init_hashtables(self): """ Initialize the hash tables such that each record will be in the form of "[storage1, storage2, ...]" """ self.hash_tables = [ storage(self.storage_config, i) for i in range(self.num_hashtables) ]
def _init_hashtables(self): """ Initialize the hash tables such that each record will be in the form of "[storage1, storage2, ...]" """ self.hash_tables = [ storage(self.storage_config, i) for i in xrange(self.num_hashtables) ]
def setUp(self):
    store = storage('redis', flush_db=True, host=redis_host, port=redis_port, db=redis_db)
    self.oi = OriginalTerms(store)
    self.st = SuggestTerms(store)

def __init__(self, edit_distance_max=2, best_suggestions_only=True, storage_type=None, **kwargs):
    store = storage(storage_type, **kwargs)
    self._terms = OriginalTerms(store)
    self._suggestions = SuggestTerms(store, best_suggestions_only)
    self.edit_distance_max = edit_distance_max
    self.best_suggestions_only = best_suggestions_only

def __init__(self, window):
    super(autocompletionManage, self).__init__()
    self.window = window
    self.dialog = wx_manage.autocompletionManageDialog()
    self.database = storage.storage()
    self.users = self.database.get_all_users()
    self.dialog.put_users(self.users)
    self.dialog.add.Bind(wx.EVT_BUTTON, self.add_user)
    self.dialog.remove.Bind(wx.EVT_BUTTON, self.remove_user)
    self.dialog.ShowModal()

def __init__(self, session):
    super(autocompletionManage, self).__init__()
    self.session = session
    self.dialog = wx_manage.autocompletionManageDialog()
    self.database = storage.storage(self.session.session_id)
    self.users = self.database.get_all_users()
    self.dialog.put_users(self.users)
    widgetUtils.connect_event(self.dialog.add, widgetUtils.BUTTON_PRESSED, self.add_user)
    widgetUtils.connect_event(self.dialog.remove, widgetUtils.BUTTON_PRESSED, self.remove_user)
    self.dialog.get_response()

def init_connection():
    uid_map = {'daniel': 75535935}
    g.connection_start = datetime.datetime.now()
    # fundamental components
    g.app = app
    g.logger = app.logger
    g.debug = app.debug
    g.args = request.args
    # set uid of author account
    g.author_uid = uid_map['daniel']
    # set up client-side and server-side storage
    from storage import storage
    # server-side storage: files or REDIS
    g.server_store = server_store()
    # client-side storage: HTTP session
    g.client_store = storage(storage.ST_HTTP_SESSION)
    # Dropbox file metadata cache
    from dbx import dbx_open
    g.open = dbx_open
    # user and author account
    g.user = None
    g.author = None
    if not request.path.startswith('/!user'):
        # user account
        g.user = get_current_user()
        # author account
        g.author = get_author()
        # if author account is not connected:
        if g.author is None:
            if g.user is None:
                # make the current user log in
                return redirect(url_for('user_login'))
            else:
                if is_current_user_author():
                    # if the current user is the author:
                    # update the author's access token in server-side storage
                    update_author_token()
                    g.author = get_author()
                    if g.author is None:
                        g.logger.error('Failed to connect author account.')
                        return 'Failed to connect author account.', 500
                else:
                    # if the current user is not the author: just fail
                    g.logger.error('Failed to connect author account.')
                    return 'Author account was not connected.', 500

def _init_hashtables(self):
    self._bands = []
    i = 0
    while i < self.input_dim:
        j = i + self.bwidth - 1
        if j >= self.input_dim:
            # discard the rest
            break
        self._bands.append((i, j))
        i = j + 1
    self.num_hashtables = len(self._bands)
    self._hashtables = [storage(self.storage_config, i)
                        for i in xrange(self.num_hashtables)]

def main():
    ''' '''
    root = "/data"
    with storage(root) as s:
        map_context = MapContext()
        loop = asyncio.get_event_loop()
        loop.create_task(fetch_vehicles(s, map_context))
        loop.create_task(fetch_routes(s, map_context))
        loop.run_forever()
        loop.close()
        logger.info("Close")

def run(self):
    bo_f = 0
    bp_f = 0
    pul_f = 0
    bp = []
    bo = []
    pul = []
    while self.thread_stop is False:
        try:
            data = self.Inqueue.get()
            if data == 'quit':
                self.Outqueue.put('quit')
                self.thread_stop = True
                break
            if data[0] == 'bo':
                if bo_f == 0:
                    bo_f = 1
                    bo = data[1]
            if data[0] == 'bp':
                if bp_f == 0:
                    bp_f = 1
                    bp = data[1]
            if data[0] == 'pul':
                if pul_f == 0:
                    pul_f = 1
                    pul = data[1]
            if bp_f == 1 and bo_f == 1 and pul_f == 1:
                bo_f = bp_f = pul_f = 0
                data = []
                for i in range(len(bo)):
                    t = storage.storage(bo[i], bp[i], pul[i])
                    data.append(t)
                alert_sys = Alert_module.Alert()
                for i in data:
                    alert_sys.get_bo_data(i.read('bo'))
                    alert_sys.get_bp_data(i.read('bp'))
                    alert_sys.get_pul_data(i.read('pul'))
                self.Outqueue.put(['Alert', alert_sys.Alert_Output()])
                ai = AiModule.AiModule()
                ai.input_check(bo, bp, pul)
                pbo, pbp, ppul = ai.predict()
                self.Outqueue.put(['AI', (pbo, pbp, ppul)])
            else:
                print("Waiting for complete data.")
                time.sleep(1)
        except BaseException:
            print("Waiting for input")
            time.sleep(3)

def execute_at_startup(window): database = storage.storage() if config.main["mysc"]["save_followers_in_autocompletion_db"] == True and config.main["other_buffers"]["show_followers"] == True: buffer = window.search_buffer("people", "followers") for i in buffer.db.settings[buffer.name_buffer]: database.set_user(i["screen_name"], i["name"], 1) else: database.remove_by_buffer(1) if config.main["mysc"]["save_friends_in_autocompletion_db"] == True and config.main["other_buffers"]["show_friends"] == True: buffer = window.search_buffer("people", "friends") for i in buffer.db.settings[buffer.name_buffer]: database.set_user(i["screen_name"], i["name"], 2) else: database.remove_by_buffer(2)
def main():
    parser = createParser()
    argument = parser.parse_args(sys.argv[1:])
    try:
        if argument.task == 1:
            if argument.subtask == 1:
                with open(argument.file, 'r') as file:
                    text.repeats(file)
            elif argument.subtask == 2:
                with open(argument.file, 'r') as file:
                    text.average(file)
            elif argument.subtask == 3:
                with open(argument.file, 'r') as file:
                    text.median(file)
            elif argument.subtask == 4:
                with open(argument.file, 'r') as file:
                    text.top(file)
        elif argument.task == 2:
            if argument.subtask == 1:
                lst = raw_input("Enter list: ").split()
                lst = [int(elem) for elem in lst]
                print sort.quicksort(lst)
            elif argument.subtask == 2:
                lst = raw_input("Enter list: ").split()
                lst = [int(elem) for elem in lst]
                print sort.mergesort(lst)
            elif argument.subtask == 3:
                lst = raw_input("Enter list: ").split()
                lst = [int(elem) for elem in lst]
                print sort.radixsort(lst)
        elif argument.task == 3:
            storage.storage()
        elif argument.task == 4:
            n = int(raw_input("Enter n: "))
            fib.print_fib(n)
    except Exception as e:
        print e

def load_buckets(sconfig=None):
    # fp = open('dbpedia-blocked-old.txt')
    # fp = open('dbpedia-blocked-shortkey.txt')
    fp = open('dbpedia-blocked-longkey.txt')
    key_list = [line.strip().split(', ') for line in fp]
    if sconfig is None:
        sconfig = {'dict': None}
    hashtable = storage(sconfig, 0)
    for i in xrange(1, len(key_list)):  # STARTS AT 1
        kl = key_list[i]
        k = kl[0]
        l = kl[1].split()
        for x in l:
            hashtable.append_val(k, int(x) - 1)  # bug fix
    fp.close()
    return hashtable

def execute_at_startup(window, buffer, config):
    database = storage.storage(buffer.session.session_id)
    if (config["mysc"]["save_followers_in_autocompletion_db"] == True
            and config["other_buffers"]["show_followers"] == True):
        buffer = window.search_buffer("followers", config["twitter"]["user_name"])
        for i in buffer.session.db[buffer.name]:
            database.set_user(i["screen_name"], i["name"], 1)
    else:
        database.remove_by_buffer(1)
    if (config["mysc"]["save_friends_in_autocompletion_db"] == True
            and config["other_buffers"]["show_friends"] == True):
        buffer = window.search_buffer("friends", config["twitter"]["user_name"])
        for i in buffer.session.db[buffer.name]:
            database.set_user(i["screen_name"], i["name"], 2)
    else:
        database.remove_by_buffer(2)

def exec_command(self):
    # import pdb
    # pdb.set_trace()
    if functions.curr_time() - self.curr_time >= 1:
        data = {
            'network': network(self.network),
            'cpu': cpu(self.cpu),
            'storage': storage(self.storage),
            'memory': memory(self.memory),
            'timestamp': functions.curr_time(),
            'id': self.system_data['id']
        }
        # pdb.set_trace()
        print data['timestamp']
        self.send_data(data, 'live')
        self.curr_time = functions.curr_time()

def execute_at_startup(window): database = storage.storage() if config.main["mysc"][ "save_followers_in_autocompletion_db"] == True and config.main[ "other_buffers"]["show_followers"] == True: buffer = window.search_buffer("people", "followers") for i in buffer.db.settings[buffer.name_buffer]: database.set_user(i["screen_name"], i["name"], 1) else: database.remove_by_buffer(1) if config.main["mysc"][ "save_friends_in_autocompletion_db"] == True and config.main[ "other_buffers"]["show_friends"] == True: buffer = window.search_buffer("people", "friends") for i in buffer.db.settings[buffer.name_buffer]: database.set_user(i["screen_name"], i["name"], 2) else: database.remove_by_buffer(2)
def activate(self, shell):
    self.shell = shell
    self.storage = storage()
    self.storage.shell = self.shell
    self.storage.player = self.shell.get_player()
    self.storage.db = self.shell.props.db
    self.httpserver = httpserver()
    self.httpserver.storage = self.storage
    self.httpserver.start()
    # advertise our server
    self.AvahiThreadO = AvahiThread()
    self.AvahiThreadO.storage = self.storage
    self.AvahiThreadO.start()
    self.AvahiThingsO = AvahiThings()
    self.AvahiThingsO.run(shell, self)

def execute_at_startup(window, buffer, config):
    database = storage.storage(buffer.session.session_id)
    if config["mysc"]["save_followers_in_autocompletion_db"] == True and config["other_buffers"]["show_followers"] == True:
        buffer = window.search_buffer("followers", config["twitter"]["user_name"])
        for i in buffer.session.db[buffer.name]:
            database.set_user(i["screen_name"], i["name"], 1)
    else:
        database.remove_by_buffer(1)
    if config["mysc"]["save_friends_in_autocompletion_db"] == True and config["other_buffers"]["show_friends"] == True:
        buffer = window.search_buffer("friends", config["twitter"]["user_name"])
        for i in buffer.session.db[buffer.name]:
            database.set_user(i["screen_name"], i["name"], 2)
    else:
        database.remove_by_buffer(2)

def __init__(self, dim, setI0, fList, matchFuncList=None, mergeFuncList=None, storage_config=None):
    self.dim = dim
    self.setI0 = setI0
    self.fList = fList  # format of fList: a list of items [features indices list]
    if storage_config is None:
        storage_config = {'dict': None}
    self.storage_config = storage_config
    if matchFuncList is None:
        matchFuncList = [fswoosh.levDist for _ in xrange(len(fList))]
    self._matchFuncList = matchFuncList
    if mergeFuncList is None:
        mergeFuncList = [fswoosh.pickLonger for _ in xrange(dim)]
    self._mergeFuncList = mergeFuncList
    self.Pf = [storage(self.storage_config, i) for i in xrange(len(fList))]
    self.Nf = [[] for i in xrange(len(fList))]
    self.setI = []
    self.curRec = None

def add_users_to_database(self): config.main["mysc"]["save_friends_in_autocompletion_db"] = self.dialog.friends_buffer.GetValue() config.main["mysc"]["save_followers_in_autocompletion_db"] = self.dialog.friends_buffer.GetValue() output.speak(_(u"Updating database... You can close this window now. A message will tell you when the process finishes.")) database = storage.storage() if self.dialog.followers_buffer.GetValue() == True: buffer = self.window.search_buffer("people", "followers") for i in buffer.db.settings[buffer.name_buffer]: database.set_user(i["screen_name"], i["name"], 1) else: database.remove_by_buffer(1) if self.dialog.friends_buffer.GetValue() == True: buffer = self.window.search_buffer("people", "friends") for i in buffer.db.settings[buffer.name_buffer]: database.set_user(i["screen_name"], i["name"], 2) else: database.remove_by_buffer(2) wx_settings.show_success_dialog() self.dialog.Destroy()
def show_menu(self): position = self.window.text.GetInsertionPoint() text = self.window.text.GetValue() text = text[:position] try: pattern = text.split()[-1] except IndexError: output.speak(_(u"You have to start writing")) return if pattern.startswith("@") == True: db = storage.storage() menu = wx_menu.menu(self.window.text, pattern[1:]) users = db.get_users(pattern[1:]) if len(users) > 0: menu.append_options(users) self.window.PopupMenu(menu, self.window.text.GetPosition()) menu.Destroy() else: output.speak(_(u"There are not results in your users database")) else: output.speak(_(u"Autocompletion only works for users."))
def reconstruct(self, dim, setI0, fList, matchFuncList=None, mergeFuncList=None, storage_config=None):
    self.dim = dim
    self.setI0 = setI0
    self.fList = fList  # format of fList: a list of items [features indices list]
    fp = open('logs/merge_log.txt', 'w+')  # clear merge log
    fp.close()
    if storage_config is None:
        storage_config = {'dict': None}
    self.storage_config = storage_config
    if matchFuncList is None:
        matchFuncList = [fswoosh.levDist for _ in xrange(len(fList))]
    self._matchFuncList = matchFuncList
    if mergeFuncList is None:
        mergeFuncList = [fswoosh.pickLonger for _ in xrange(dim)]
    self._mergeFuncList = mergeFuncList
    self.Pf = [storage(self.storage_config, i) for i in xrange(len(fList))]
    self.Nf = [[] for i in xrange(len(fList))]
    self.setI = []
    self.curRec = None

def add_users_to_database(self): self.config["mysc"]["save_friends_in_autocompletion_db"] = self.dialog.get("friends_buffer") self.config["mysc"]["save_followers_in_autocompletion_db"] = self.dialog.get("followers_buffer") output.speak( _(u"Updating database... You can close this window now. A message will tell you when the process finishes.") ) database = storage.storage(self.buffer.session.session_id) if self.dialog.get("followers_buffer") == True: buffer = self.window.search_buffer("followers", self.config["twitter"]["user_name"]) for i in buffer.session.db[buffer.name]["items"]: database.set_user(i["screen_name"], i["name"], 1) else: database.remove_by_buffer(1) if self.dialog.get("friends_buffer") == True: buffer = self.window.search_buffer("friends", self.config["twitter"]["user_name"]) for i in buffer.session.db[buffer.name]["items"]: database.set_user(i["screen_name"], i["name"], 2) else: database.remove_by_buffer(2) wx_settings.show_success_dialog() self.dialog.destroy()
def __init__(self, spiderType, num_of_threads):
    logging.debug('get into __init__()')
    # MODIFY HERE!!!!!
    self.STORESIZE = 100
    self.spiderType = spiderType
    self.num_of_threads = num_of_threads
    self.accounts = Queue.Queue()
    # work queue which spiders fetch ids from
    self.workQueue = Queue.Queue()
    # result queue into which spiders put data
    self.resultQueue = Queue.Queue()
    # storage class
    self.storage = storage()
    # deduplicator class
    self.deduplicator = util.Deduplicator()
    # self.event = threading.Event()
    self.threads = []
    self.initAccount()
    self.prepareSource()
    self.initThread()

def show_menu(self): position = self.window.text.GetInsertionPoint() text = self.window.text.GetValue() text = text[:position] try: pattern = text.split()[-1] except IndexError: output.speak(_(u"You have to start writing")) return if pattern.startswith("@") == True: db = storage.storage() menu = wx_menu.menu(self.window.text, pattern[1:]) users = db.get_users(pattern[1:]) if len(users) > 0: menu.append_options(users) self.window.PopupMenu(menu, self.window.text.GetPosition()) menu.Destroy() else: output.speak( _(u"There are not results in your users database")) else: output.speak(_(u"Autocompletion only works for users."))
def start(prj, roots, exclude=""):
    global st, from_vim
    st = storage.storage(prj)
    roots = roots.split(',')
    if exclude:
        exclude = exclude.split(',')
    else:
        exclude = []
    roots = [os.path.realpath(p.strip()) for p in roots]
    exclude = [os.path.realpath(p.strip()).split(os.path.sep) for p in exclude]
    if not from_vim:
        sys.stdout.write("Indexing ")
    else:
        vim.command('redraw | echo "Indexing "')
    for p in roots:
        walk(p, exclude)
    st.close()
    print ' Done. Processed %d Modules, %d modules changed.' % (count, modcount)
    if errors:
        sys.stderr.write('%d modules could not be indexed because of syntax errors. Use --debug option to see details.\n' % len(errors))
        if DEBUG:
            for error in errors:
                sys.stderr.write(error)

def generate(a, b, c):
    bloodoxy = InputModule_lxc.readdata(a)
    bloodpressure = InputModule_lxc.readdata(b)
    pulse = InputModule_lxc.readdata(c)
    database = []
    for i in range(len(bloodoxy)):
        database.append(storage.storage(bloodoxy[i], bloodpressure[i], pulse[i]))
    AI = AiModule.AiModule()
    AI.input_check(bloodoxy, bloodpressure, pulse)
    a, b, c = AI.predict()
    UserInterface_module.ai_output(a, b, c)
    alarm = Alert_Module_Xinsha.Alert()
    for i in range(len(bloodoxy)):
        alarm.get_bo_data(bloodoxy[i])
        alarm.get_bp_data(bloodpressure[i])
        alarm.get_pul_data(pulse[i])
    UserInterface_module.alert_out(alarm.Alert_Output())

def get_raw_data(self, a_filename):
    l_store = storage.storage()
    self.m_content = l_store.read({'filename': a_filename})
    ofile = open("results.xls", "wb")
    ofile.write(self.m_content[0]['content'].encode('cp1252'))
    ofile.close()

def _init_hashtables(self, L, storage_config, storage_fid=None):
    """Initializes hash tables."""
    self.L = L
    self.hash_tables = storage(L, storage_config, storage_fid)

def start(roots, exclude=[]):
    roots = [os.path.abspath(p.strip()) for p in roots]
    exclude = [os.path.abspath(p.strip()).split(os.path.sep) for p in exclude]
    sys.stdout.write("Indexing ")
    for p in roots:
        walk(p, exclude)
    st.close()
    print ' Done. Processed %d Modules, %d modules changed.' % (count, modcount)
    print 'Total %d modules which has %d classes and %d functions.' % st.counts()
    if errors:
        sys.stderr.write('%d modules could not be indexed because of syntax errors. Use --debug option to see details.\n' % len(errors))
        if DEBUG:
            for error in errors:
                sys.stderr.write(error)

if __name__ == '__main__':
    if len(sys.argv) < 3:
        print 'Usage: python %s <proj-file> <source folders> [<exclude folders>] [--debug]' % sys.argv[0]
        print '  <proj-file> is the result which can be used in vimpy'
        print '  <source folders> and <exclude folders> can be comma separated list of folders as well [Optional]'
        print '  --debug will show errors if any during indexing. [Optional]'
        exit(1)
    st = storage.storage(sys.argv[1])
    exclude = []
    if len(sys.argv) >= 4:
        if not sys.argv[3] == '--debug':
            exclude = sys.argv[3].split(',')
    DEBUG = sys.argv[-1] == '--debug'
    start(sys.argv[2].split(','), exclude)

def __init__(self, signals_list=[]):
    self.storer = storage()
    self.signals_list = signals_list

def save(self, a_filename):
    l_store = storage.storage()
    l_store.write({'filename': a_filename, 'content': self.m_content})
    logging.debug("save done. " + a_filename)

def __init__(self, window, session_id):
    super(autocompletionUsers, self).__init__()
    self.window = window
    self.db = storage.storage(session_id)

def redis_storage():
    return storage('redis', flush_db=True, host=redis_host, port=redis_port, db=redis_db)

def __init__(self, signals_list=[]):
    self.storer = storage()
    self.signals_list = signals_list

def tearDown(self):
    storage('redis', flush_db=True, host=redis_host, port=redis_port, db=redis_db)

#!/usr/bin/python
import web
import requests, string
import markdown
import storage
import rediswebpy
import json

global configd
configd = json.loads(open('config.json').read())
stor = storage.storage(configd['couch'], configd['database'])
# web.config.debug = False

urls = (
    '/', 'home',
    '/author/(.*)', 'author',
    '/tags/(.*)', 'tags',
    '/slides/', 'slides',
    '/config', 'config',
    '/thing/(.*)', 'thing',
    '/attachment/(.*)', 'attachment',
    '/action/(.+)/(.+)', 'do_action',
    '/graph/(.*)', 'd3graph',
    '/slides', 'slides',
    '/stl/(.+)/(.+)', 'stl_view',
    '/image/(.+)/(.+)', 'image_view',
    '/cube/', 'cube'
)

def GET(self):
    global mpl
    # syslog.syslog("BALU: server class invoked")
    user_data = web.input()
    identity = user_data['i']
    t = user_data['t']
    d = user_data['d']
    # syslog.syslog("BALU: i = %s, t = %s, d = %s" % (identity, t, d))
    # split d here; format is: data(spaced),from_number,from_name,node_name
    d = d.split(",")
    from_number = d[1]
    from_name = d[2]
    node_name = d[3]
    d = d[0]
    if t == "MKP":
        # syslog.syslog("BALU: inside t == MKP")
        if 'sell' in d or 'Sell' in d:
            ret = d + " : "
            # this needs to be put in the queue
            message = d.split(' ')  # format: sell BALU 5kg 50
            if message[1] not in mpl:
                mpl[message[1]] = []
                mpl[message[1]].append(message[2] + ',' + message[3])
            else:
                mpl[message[1]].append(message[2] + ',' + message[3])
            syslog.syslog("BALU: post: %f,%s,%s,%s" % (time.time(), identity, t, d))
            data_to_be_sent = {}
            data_to_be_sent['to'] = from_number
            data_to_be_sent['msisdn'] = 767  # this could be a problem because it might expect a string
            data_to_be_sent['text'] = ret + "posted"
            # node_name coming in each request is the ip of the handler
            thread = get.get('http://' + node_name + '/marketplace_aws_handler', '', data_to_be_sent)
            thread.start()
        elif 'search' in d or 'Search' in d:
            ret = d + ":"
            message = d.split(' ')  # format: search BALU
            if message[1] in mpl:
                if len(mpl[message[1]]) < 5:
                    # for loop till len to compose the message
                    for i in range(0, len(mpl[message[1]])):
                        temp = mpl[message[1]][i]
                        temp = temp.split(',')
                        temp = temp[0] + " at " + temp[1] + " per unit,"
                        ret = ret + temp
                    # send this message back to the node here
                else:
                    for i in range(0, 5):
                        temp = mpl[message[1]][-1 * i]
                        temp = temp.split(',')
                        temp = temp[0] + " at " + temp[1] + " per unit,"
                        ret = ret + temp
            else:
                ret = ret + "no such crop found"
            # Done: have to send message back to server, find this code.
            # Done: have to add 767 to the sip_buddies table in the subscriber registry db
            # Done: which db table will tell us from which server this stuff came? I should plug that stuff in here.
            # Done: testing
            data_to_be_sent = {}
            data_to_be_sent['to'] = from_number
            data_to_be_sent['msisdn'] = 767  # this could be a problem because it might expect a string
            data_to_be_sent['text'] = ret
            # node_name coming in each request is the ip of the handler
            thread = get.get('http://' + node_name + '/marketplace_aws_handler', '', data_to_be_sent)
            thread.start()
            syslog.syslog("BALU: search: %f,%s,%s,%s" % (time.time(), identity, t, d))
        elif 'buy' in d or 'Buy' in d:
            ret = d + " : "
            message = d.split(' ')  # format: buy BALU 5KG 52
            if message[1] not in mpl:
                ret = ret + "There is no such crop"  # then there is nothing to buy
            elif mpl[message[1]] == []:
                ret = ret + "probably sold"
            else:
                search = message[2] + "," + message[3]
                index = -1
                for i in range(0, len(mpl[message[1]])):
                    if mpl[message[1]][i] == search:
                        index = i
                        break
                if index != -1:
                    # item was found at index = index
                    temp = mpl[message[1]][index].split(',')
                    ret = ret + "You bought " + temp[0] + " at " + temp[1] + " per unit"
                    # syslog.syslog("index = %d" % index)
                    mpl[message[1]].remove(search)  # could be moved to a transactions folder
            data_to_be_sent = {}
            data_to_be_sent['to'] = from_number
            data_to_be_sent['msisdn'] = 767  # this could be a problem because it might expect a string
            data_to_be_sent['text'] = ret
            # node_name coming in each request is the ip of the handler
            thread = get.get('http://' + node_name + '/marketplace_aws_handler', '', data_to_be_sent)
            thread.start()
            syslog.syslog("BALU: get: %f,%s,%s,%s" % (time.time(), identity, t, d))
            # for loop from last to first of size 5 to compose message
    # New user for naming and addressing testing
    elif t == "NEW":
        # disk_storage = storage.storage()
        # output = disk_storage.store(from_number, from_name, node_name, t)
        # if output is True:
        #     # have to send back a response saying that i have saved something.
        #     data_to_be_sent = {}
        #     data_to_be_sent['i'] = identity
        #     data_to_be_sent['t'] = t
        #     data_to_be_sent['d'] = "success"
        #     # thread = get.get(node_name, '', data_to_be_sent)  # node_name coming in each request is the ip of the handler
        #     # thread.start()
        # elif output is False:
        #     data_to_be_sent = {}
        #     data_to_be_sent['i'] = identity
        #     data_to_be_sent['t'] = t
        #     data_to_be_sent['d'] = "already"
        #     # thread = get.get(node_name, '', data_to_be_sent)  # node_name coming in each request is the ip of the handler
        #     # thread.start()
        #     # have to send back a response saying that the user already exists
        disk_storage = storage.storage()
        # disk_storage.clean()
        # for i in range(0, self.how_many):
        from_number = from_number
        # print from_number
        from_name = from_number
        node_name = "anything"
        t = "NEW"
        output = disk_storage.store(from_number, from_name, node_name, t)
        if output is True:
            # have to send back a response saying that i have saved something.
            syslog.syslog("BALU: uid=%s and timesaved=%s" % (from_number, str(time.time())))
        elif output is False:
            syslog.syslog("BALU: uid=%s and timesaved=%s" % (from_number, str(time.time())))
    elif t == "NEW HASH":
        # hash_ring = myhash.Hash(["8090","8091","8092","8093","8094","9090","9091","9092","9093","9094"])
        # temp = hash_ring.get_md5(identity)
        x = int(identity) % 5
        if x == 1:
            node = 8090
        elif x == 2:
            node = 8091
        elif x == 3:
            node = 8092
        elif x == 4:
            node = 8093
        else:
            node = 8094
        # node = hash_ring.get_node(temp)
        print node
        data_to_be_sent = {}
        data_to_be_sent['i'] = identity
        data_to_be_sent['t'] = "NEW"
        data_to_be_sent['d'] = user_data['d']
        # syslog.syslog("BALU: uid=%s and time=%s" % (identity, str(time.time())))
        thread = get.get('http://127.0.0.1:' + str(node) + '/server', '', data_to_be_sent)
        thread.start()
        syslog.syslog("BALU: Node selected is %s" % node)
    elif t == "CLEAN":
        disk_storage = storage.storage()
        disk_storage.clean()

def process3(p3):
    # Storage
    print('-------Start Processing Storage system......')
    for i in range(len(OutInputBo)):
        storage.storage(OutInputBo[i], OutInputBp[i], OutInputPul[i]).store()
    print('-------Finished Processing Storage model-------')

def save(self):
    l_store = storage.storage()
    l_store.write({'filename': 'all_hist.csv ', 'content': self.m_content})

def __init__(self):
    self.answers = []
    self.datastore = storage.storage()
    self.answers = self.datastore.getanswers()

fd.close() return "Binary message written!" else: return "415 Unsupported Media Type ;)" app.config['flask_profiler'] = { "enabled": True, "storage": { "engine": "mongodb" }, "basicAuth": { "enabled": True, "username": "******", "password": "******" }, "ignore": ["^/static/.*"] } flask_profiler.init_app(app) if __name__ == '__main__': f = Dictionary(storage_type='redis', edit_distance_max=2, best_suggestions_only=False) store = storage(storage_type='redis') pool = redis.ConnectionPool( host='localhost', port=6379, decode_responses=True) # host是redis主机,需要redis服务端和客户端都起着 redis默认端口是6379 r = redis.Redis(connection_pool=pool) app.run(host="127.0.0.1", port=5666)
def init_storage():
    return storage()

def create(self, db_name):
    self.file_obj = storage(db_name)
    self.file_obj.fcreate(db_name)
    return 1

pul_path = "./pul.txt" Bo = [] Bp = [] Pul = [] print("------Start processing input------") OutInputBo = Input_module.read_data(bo_path) OutInputBp = Input_module.read_data(bp_path) OutInputPul = Input_module.read_data(pul_path) print("------Finish processing input------\n") print("------Start processing storage------") print("data stored successfully") for i in range(len(OutInputBo)): s = storage.storage(OutInputBo[i], OutInputBp[i], OutInputPul[i]) Bo.append(s.read('bo')) Bp.append(s.read('bp')) Pul.append(s.read('pul')) print("------Finish processing storage------\n") print("------Start processing Alert system------") alert = Alert_module.Alert() for i in range(len(Bo)): alert.Alert_for_three_categories_input([Bo[i], 0]) for i in range(len(Bp)): alert.Alert_for_three_categories_input([Bp[i], 1]) for i in range(len(Pul)): alert.Alert_for_three_categories_input([Pul[i], 2]) UI.alert_out(alert.Alert_Output()) print("------Finish processing Alert system------\n")
def setUp(self):
    store = storage(None)  # None means built-in dictionary storage
    self.oi = OriginalTerms(store)
    self.st = SuggestTerms(store)

def loadconfig(type):
    SCRIPTPATH = os.path.dirname(os.path.realpath(__file__))
    with open('/etc/hippo-idp/' + type + '.yml') as cfgstream:
        config = yaml.load(cfgstream)
    return config


class InvalidTokenRequest(Exception):
    pass


class ClientAuthenticationFailure(Exception):
    pass


log = logging.getLogger('idp')
config = loadconfig('config')
clients = loadconfig('clients')
redis_port = config['sessionstore']['port']
redis_address = config['sessionstore']['address']
idservice = config['idservice']['address']
jwt_expiry_seconds = config['session']['jwtexpiryseconds']
subject_attribute_name = None
if 'subject' in config['attributes']:
    subject_attribute_name = config['attributes']['subject']
storage(redis_address, redis_port)

def _init_hashtables(self):
    self.hash_tables = [storage(self.storage_config, i)
                        for i in xrange(self.num_hashtables)]

#!/usr/bin/python
'''Run updates for Soundcloud Flask Application'''
import storage

storage_client = storage.storage()
print 'starting feed update'
storage_client.update()
print 'starting favourites updates'
storage_client.update_favorites()
print 'end'

                    metavar='last', help='the number of (latest) records to retrieve')
args = parser.parse_args()
last = args.last
node_id = args.node_id
if node_id is None:
    print 'ID of the node must be specified. Example: python db2csv.py --node_id 4'
    exit()

capability = read_capability()
dbtag = capability[node_id]['dbtag']

print 'From sensor_data.db reading node_{:03d}...'.format(node_id)
store = storage()
if last is None:
    print 'Reading entire database... (to get only the last N entries, use the --last switch)'
    tmp = store.read_all(node_id, dbtag)
else:
    print 'Reading last {} records...'.format(last)
    tmp = store.read_latest(node_id, count=last)

print 'Writing to file...'
with open('sensor_data_node_{:03d}.csv'.format(node_id), 'w', 0) as f:
    f.write(','.join(dbtag) + '\n')
    tmp = [tmp[t] for t in dbtag]
    for r in zip(*tmp):
        # print r[0].strftime('%Y-%m-%d %H:%M:%S')
        ts = time.mktime(r[0].timetuple())
        tmp = ','.join([str(v) for v in [ts] + list(r[1:])])