def main (argv):
    """Dispatch a mybackup subcommand: init, store, list, test, get, restore.

    argv -- command-line arguments with the program name already stripped.
    Python 2 code; archiveDir and the subcommand modules are module globals.
    """
    logger = initLogger();
    if (len(argv) == 0):
        print "Missing argument. Options: init, store, list, test, get, restore";
    elif (argv[0] == "init"):
        init.init(archiveDir);
    elif (argv[0] == "store"):
        # store requires a source directory argument
        if (len(argv) < 2):
            print "Usage: mybackup store <directory>";
        else:
            store.store(archiveDir, argv[1], logger);
    elif (argv[0] == "list"):
        # optional pattern argument narrows the listing
        if (len(argv) < 2):
            listBackups.list(archiveDir)
        else:
            listBackups.list(archiveDir, argv[1])
    elif (argv[0] == "get"):
        if (len(argv) < 2):
            print "Usage: mybackup get <pattern>";
        else:
            restore.getFile(archiveDir, argv[1]);
    elif (argv[0] == "restore"):
        # optional destination argument; default restores in place
        if (len(argv) < 2):
            restore.restoreAll(archiveDir)
        else:
            restore.restoreAll(archiveDir, argv[1])
    elif (argv[0] == "test"):
        test.test(archiveDir, logger)
    else:
        print "Unknown option: "+argv[0];
def mainapp(servername):
    """Interactive warehouse console: store, take out, or list items.

    servername -- passed through to the store/takeout/seenaq handlers.
    Loops until the user enters 'q'.
    """
    import json
    import datetime
    import os
    import time
    import store
    import takeout
    import seenaq
    print("Establishing secure connection...")
    # NOTE(review): appends today's date to the certificate file — this looks
    # like a connection log rather than real TLS handling; confirm intent.
    with open("storage/otmssl.crt", "a") as certificate:
        date = datetime.date.today()
        certificate.write("\n")
        certificate.write(str(date))
    print("Established.")
    print("Connected.")
    print("What do you want to do?")
    print("1. Store things in the warehouse.")
    print("2. Take things out of the warehouse.")
    print("3. See everything in the warehouse.")
    print("q. Exit.")
    while True:
        sth = input(">> ")
        if sth == "1":
            store.store(servername)
        elif sth == "2":
            takeout.takeout(servername)
        elif sth == "3":
            seenaq.seenaq(servername)
        elif sth == "q":
            exit()
def out_of_battle():
    """Out-of-combat game loop: walk (random encounter), rest, go home, or shop.

    Recurses into itself to repeat the prompt; hands control to battle(),
    intro(), or store() on the corresponding choices.
    """
    print(
        "You are now walking down a road. What do you do? \n1. Continue to walk \n2. Rest\n3. Go Home\n4. Store"
    )
    what_happens_action = str(input())
    if what_happens_action == "1":
        # 1-in-5 chance nothing happens; otherwise a battle starts.
        what_happens = random.randint(1, 5)
        if what_happens == 1:
            print("You continue to walk.")
            out_of_battle()
        else:
            print("You see a figure coming closer to you.")
            from battle_backup import battle
            battle()
    elif what_happens_action == "2":
        print("You rested. You now have full health.")
        out_of_battle()
    elif what_happens_action == "3":
        print("You went home. Do you go back out")
        decision = str(input())
        if decision == "yes":
            out_of_battle()
        else:
            print("You go to sleep.")
            from game_part_1 import intro
            intro()
    elif what_happens_action == "4":
        from store import store
        store()
    else:
        # Unrecognized input: prompt again.
        out_of_battle()
def write():
    """Flask handler: read the JSON body, store its email, echo the payload.

    Returns the incoming JSON message back to the caller.
    """
    message = request.json
    print('testing')
    # 'email' defaults to '' when the key is absent from the body.
    email = message.get('email', '')
    print(email)
    print('testing')
    store(email)
    return jsonify(message)
def setUp(self):
    """Test fixture: connect to Neo4j, load the static Ansible inventory,
    and store it in the graph under the configured label.

    Fix: the session is now closed in a ``finally`` block so a failure in
    inventory loading or storing no longer leaks the Bolt session.
    """
    conf = utils.load_conf("test_config.yml")
    neo4j_driver = utils.connect_to_neo4j(
        conf["neo4j"]["host"],
        conf["neo4j"]["bolt_port"],
        conf["neo4j"].get("user", None),
        conf["neo4j"].get("password", None))
    session = neo4j_driver.session()
    try:
        self.static_inventory = load_ansible_inventory(
            conf["ansible"]["playbook_dir"],
            conf["ansible"]["inventory_path"],
            None)
        store(session, self.static_inventory, conf["label_name"])
    finally:
        # Always release the session, even when setUp fails part-way.
        session.close()
def main():
    """Backup-tool CLI dispatcher: init/restore/get/test/list/store.

    Most subcommands require backup.readyToBackup() to be true; 'init' runs
    regardless so the archive can be created first.
    """
    cmds = sys.argv
    if len(cmds) > 1:
        ui = cmds[1].lower()
        if ui == 'init':
            init()
        if backup.readyToBackup():
            if ui == 'restore' and len(backup.getIndex()) > 0:
                # optional destination argument; default is backup.restDir
                if len(cmds) > 2:
                    restore(os.path.join(backup.restDir, cmds[2]))
                else:
                    print("Restoring to default location %s " % backup.restDir)
                    restore(backup.restDir)
                print("All files restored.")
            elif ui == 'get':
                try:
                    get(cmds[2])
                except IndexError:
                    print("Please give a pattern eg. 'file' ")
            elif ui == 'test':
                fileEntries = validateArchiveIndex()
                invalidFiles = validateFiles()
                #print(fileEntries)
                #print("Correct Entries: %s " % fileEntries['Correct Entries'])
                #print("Incorrect Entries: %s " % archiveList['Incorrect Entries'])
                if fileEntries['Erroneous Paths'] != []:
                    print("Erroneous Paths: %d" % len(fileEntries['Erroneous Paths']))
                    print('\n'.join(' {}: {}'.format(*x) \
                        for x in enumerate(fileEntries['Erroneous Paths'],1)))
            elif ui == 'list':
                if len(cmds) > 2:
                    print('\n'.join('{}: {}'.format(*x) \
                        for x in enumerate(listFiles(cmds[2]),1)))
                else:
                    # NOTE(review): listFiles() is called twice on this path —
                    # once discarded, once enumerated; confirm whether the
                    # first call has side effects or is redundant.
                    listFiles()
                    print('\n'.join('{}: {}'.format(*x) \
                        for x in enumerate(listFiles(),1)))
            elif ui == 'store' and len(cmds) > 2:
                if os.path.isdir(cmds[2]):
                    store(cmds[2])
                else:
                    print("invalid directory")
        else:
            print("not ready... run 'cat' init")
    else:
        print("run 'init' to initialise the program")
def main():
    """Run the full scraping job: process all equity tickers in batches,
    merging per-batch fault summaries and reporting overall success rates."""
    logger.info(f"SCRAPER,JOB,INITIATED,{DATE},")
    init()
    tickers = _connector.get_equity_tickers(N_USD)
    # Quarter-of-batch-count checkpoint; only used by the commented-out
    # partial-report block below.
    checkpoint = len(tickers) / BATCH_SIZE
    checkpoint = int(checkpoint / 4)
    faults_summary = {
        "options" : {},
        "analysis" : {},
        "keystats" : {},
        "ohlc" : {}
    }
    db_flags, db_stats = [], []
    ###############################################################################################
    # Walk the ticker list in BATCH_SIZE windows; 'batch' is the exclusive end index.
    for batch_id, batch in enumerate(range(BATCH_SIZE, len(tickers) + BATCH_SIZE, BATCH_SIZE)):
        ticker_batch = tickers[batch - BATCH_SIZE : batch]
        results = batch_main(batch_id, ticker_batch)
        b_fault_summary, b_db_flag, b_db_stats = results
        # Fold the per-batch fault summaries into the job-level summary.
        for key in b_fault_summary:
            for ticker in b_fault_summary[key]:
                faults_summary[key][ticker] = b_fault_summary[key][ticker]
        db_flags.append(b_db_flag)
        db_stats.append(b_db_stats)
        # Success rates over everything processed so far.
        success, failure = get_job_success_rates(tickers[ : BATCH_SIZE * (1 + batch_id)])
        send_metrics(success, failure)
        # if batch_id % checkpoint == 0 and batch_id != 0:
        #     report("Partial", success, failure, faults_summary, db_flags, db_stats)
    ###############################################################################################
    success, failure = get_job_success_rates(tickers)
    report("Full", success, failure, faults_summary, db_flags, db_stats)
    store()
    logger.info(f"SCRAPER,JOB,TERMINATED,{DATE},")
def deposit(self, amount, id):
    """Apply a deposit to the account identified by ``id`` and persist the
    resulting change events.

    Raises ValueError when ``amount`` is negative.
    """
    if amount < 0:
        raise ValueError('Invalid amount.')
    # Rebuild the aggregate from its event history, then apply the deposit.
    account = Account(self.get_events(id))
    account.make_deposit(amount)
    # Persist the newly generated change events.
    events = store()
    events.save(account.id, account.changes)
def interpret_init():
    """Build the initial interpreter state from the global ``ops`` bindings.

    For each (name, value) pair: allocate a store cell, bind the name to it
    in the environment, then write the initial value into that cell.
    Returns (environment, store).
    """
    env = environment()
    s_store = store()
    for op in ops:
        # op[0] is the identifier, op[1] its initial value.
        env.pushTo(op[0], s_store.alloc())
        s_store.setAt(env.lookUp(op[0], s_store), op[1])
    return env, s_store
def predict():
    """Flask view: load the pickled model, predict labels for the uploaded
    test file, persist the predictions, and render the results page."""
    string = str('test')
    hist_pred_n = string + "hist_pred.jpeg"
    # Loading from .pkl files
    pkl_hnd = store(app.config['static_path'], app.root_path)
    clf = pkl_hnd.load('model')
    n_labels = pkl_hnd.load('n_labels')
    enc = pkl_hnd.load('enc')
    # Feature extraction (bag of up to 5000 features from the test text)
    data = utils.file_parser_test(
        os.path.join(app.config['upload_path'], "test.txt"))
    features = utils.feature_extractor(data['text'], 5000)
    # Preprocessing features (reduce to 2500 dimensions)
    data_x = utils.preprocess_features(features, 2500)
    # Predicting
    pr = predict_model(data_x)
    pred_enc = pr.predict_model(clf)
    # Decoding the encoded prediction back to label names
    pred = utils.label_encoder(pred_enc, True, enc)
    pkl_hnd.save_pred(data_x, pred)  # Saving predicted value and data into .csv file
    # Plotting histogram of prediction
    pkl_hnd.plot_hist(pred, hist_pred_n)
    return render_template(
        "predict_result.html",
        img_hist_pred=url_for(app.config['static_path'], filename=hist_pred_n),
    )
def __init__(self):
    """Load the legacy XML data into a shared store and build the UI pages."""
    # Shared data store populated from the old-format XML file.
    self.__store = store()
    self.__dataxml = dataxml()
    self.__dataxml.read('old.xml', self.__store)
    # All pages operate on the same store instance.
    self.materialspage = materialspage(self.__store)
    self.productspage = productspage(self.__store)
    self.sellspage = sellspage(self.__store)
def auth(username, password):
    """Send a RADIUS Access-Request for a wifidog client and return the
    populated session store (auth flag, messages, reply attributes).

    username/password -- client credentials; gw_address, gw_port, mac, ip,
    srv, and config are module-level globals.
    """
    store_data = store.store()
    store_data.create()
    store_data["auth"] = False
    token = store_data.session_key
    req = srv.CreateAuthPacket(code=pyrad.packet.AccessRequest, User_Name=username)
    req["Acct-Status-Type"] = "Start"
    req["User-Password"] = req.PwCrypt(password)
    req["NAS-IP-Address"] = gw_address
    req["NAS-Port"] = config.custom_nas_port
    req["NAS-Port-Type"] = config.custom_nas_port_type
    # MAC OF WIFIDOG "00-10-A4-23-19-C0"
    req["NAS-Identifier"] = config.node_mac
    req["Acct-Session-Id"] = token
    # MAC OF WIFIDOG "00-10-A4-23-19-C0"
    req["Called-Station-Id"] = config.node_mac
    # MAC OF USER OR IP "00-00-B4-23-19-C0"
    # NOTE(review): 'mac' and 'ip' are globals here — confirm they are set
    # for the current client before auth() is called.
    req["Calling-Station-Id"] = mac
    req["Framed-IP-Address"] = ip
    req["Service-Type"] = pyrad.packet.AccessRequest
    req["Acct-Delay-Time"] = 0
    req["Acct-Input-Octets"] = 0
    req["Acct-Output-Octets"] = 0
    # WISPr-Location-ID = "isocc=,cc=,ac=,network=Coova,Wicoin_Test"
    req["WISPr-Location-ID"] = str(config.custom_wispr_location_id)
    # WISPr-Location-Name = "Wicoin_Test"
    req["WISPr-Location-Name"] = str(config.custom_wispr_location_name)
    # http://7.0.0.1:2060/wifidog/auth?logout=1&token=4f473ae3ddc5c1c2165f7a0973c57a98
    req["WISPr-Logoff-URL"] = "http://" + str(gw_address) + ':' + str(
        gw_port) + "/wifidog/auth?logout=1&token=" + str(token)
    reply = SendPacket(srv=srv, req=req)
    auth_message = reply.code
    if reply.code == pyrad.packet.AccessAccept:
        # Accepted: persist credentials, session start time, and every
        # attribute the server returned.
        store_data["auth"] = True
        store_data["username"] = username
        store_data["password"] = password
        store_data["session_start"] = time.time()
        store_data["auth_message"] = " User Access Accept "
        store_data["auth_response"] = reply.code
        for i in reply.keys():
            store_data[i] = reply[i][0]
    elif reply.code == pyrad.packet.AccessReject:
        # Rejected: include the server's Reply-Message when present.
        if "Reply-Message" in reply:
            store_data["auth_message"] = " User Access Reject -" + \
                str(reply["Reply-Message"][0])
        else:
            store_data["auth_message"] = " User Access Reject "
        store_data["auth_response"] = reply.code
    else:
        # String literal rejoined here; it was split across a source-line
        # break in the original file.
        store_data[
            "auth_message"] = " An error occurred during the validation process "
        store_data["auth_response"] = reply.code
    store_data.save()
    return store_data
def process_client(name, email):
    """Create a new client aggregate, persist its change events, and return
    the new client's id."""
    new_client = Client()
    new_client.create_client(name, email)
    # Persist the creation events to the event store.
    events = store()
    events.save(new_client.id, new_client.changes)
    return new_client.id
def __init__(self, context=None, case=None):
    """Bind logging, assertions, the store, and the device for one test case.

    NOTE(review): context/case default to None, but context is dereferenced
    below (context.isRecording()) — callers must pass a real context.
    """
    self.logger = Logger.getLogger()
    self.assertion = assertion()
    self.logger.debug('init store')
    self.store = store(context, case)
    self.device = DeviceManager.getInstance().getDevice()
    # Whether this run is being recorded, taken from the test context.
    self._isRecord = context.isRecording()
    # -1 means no checkpoint has been taken yet.
    self._checkpoint = -1
def display_data(input_str):
    """Parse a Python-literal payload, persist it, and return the save
    result as a JSON string.

    input_str -- a string containing a Python literal (dict/list/etc.);
    ast.literal_eval parses it safely (no eval of arbitrary code).
    Fix: removed the dead ``if 1:`` guard and the pointless ``res = {}``
    initializer — the body always executed, so behavior is unchanged.
    """
    jsonObj = ast.literal_eval(input_str)
    dObj = store.store()
    res = dObj.save(jsonObj)
    return json.dumps(res)
def visited(subject, sid):
    """Record a visit: extend the open visit for session ``sid`` if one
    exists, otherwise start a new Visit record."""
    visits = store.store(Visit)
    now = datetime.datetime.now()
    visit = visits.first(session=sid)
    if visit:
        # Existing visit: just bump its end time.
        visit.ended = now
        visits.put(visit)
    else:
        # First sighting of this session: start and end both set to now.
        visits.put(Visit(session=sid, subject=subject, started=now, ended=now))
def achieved(goal, subject):
    """Record that a goal has been achieved"""
    # NOTE(review): 'user' is a module-level global here — confirm it holds
    # the current request's user.
    now = datetime.datetime.now()
    entry = dict(goal=goal, subject=subject, username=user.username,
                 timestamp=now)
    achievements = store.store(Achievement)
    achievements.put(entry)
def get(id):
    """Load a client aggregate by ``id`` and return its name/email as a dict.

    Raises ClientNotFoundException when no events exist for the id.
    (The parameter name shadows the builtin ``id`` but is part of the
    public signature, so it is left unchanged.)
    """
    event_store = store()
    stream = event_store.load(id)
    # version == -1 marks an empty (nonexistent) event stream.
    if stream.version == -1:
        raise ClientNotFoundException(id)
    acc = Client(stream.events)
    json_acc = {'name': acc.name, 'email': acc.email}
    return json_acc
def withdrawal(self, amount, id):
    """Apply a withdrawal to the account identified by ``id`` and persist
    the resulting change events.

    Raises ValueError for a negative amount or insufficient balance.
    """
    if amount < 0:
        raise ValueError('Invalid amount.')
    # Rebuild the aggregate from its event history.
    account = Account(self.get_events(id))
    if account.balance - amount < 0:
        raise ValueError('You don\'t have that amount to withdrawal.')
    account.make_withdrawal(amount)
    # Persist the newly generated change events.
    events = store()
    events.save(account.id, account.changes)
def store_order():
    """Check insertion order: re-storing an existing key moves it to the back."""
    s = store(2)
    s.store('a', 0)
    s.store('b', 1)
    s.store('c', 2)
    s.store('d', 3)
    assert s.getData(0) == ['a', 'b', 'c', 'd']
    # 'a' already present: storing it again should move it to the end.
    s.store('a', 4)
    assert s.getData(0) == ['b', 'c', 'd', 'a']
def get():
    """Key/value GET endpoint: ?api_key=...&k=... -> value, 403/404 on error."""
    api_key = request.args["api_key"]
    key = request.args["k"]
    s = store(api_key)
    # store() yields None for an unknown API key.
    if s is None:
        return f"Error, key {api_key} was invalid", 403
    value = s.get(key)
    # None from the backing store means the key does not exist.
    if value is None:
        return f"Error, key {key} not found in database", 404
    return value, 200
def process_account(self, client_id):
    """Open a new account for an existing client and return the account id.

    Raises ClientNotFoundException when the client's event stream is empty.
    """
    event_store = store()
    client_stream = event_store.load(client_id)
    # version == -1 marks an empty (nonexistent) event stream.
    if client_stream.version == -1:
        raise ClientNotFoundException(client_id)
    account = Account()
    account.create_account(client_id)
    event_store.save(account.id, account.changes)
    #self.send_email(account.id)
    return account.id
def __init__(self, address="http://localhost:12000/", time_step=500):
    """Client state holder.

    address -- base server URL (a trailing slash is ensured).
    time_step -- polling period in milliseconds; stored as seconds.
    """
    self.__app = None
    self.__typemap = {}
    self.__name2type = {}
    self.object_store = store()
    # Normalize the base URL so joined paths are well-formed.
    if not address.endswith('/'):
        address += '/'
    self.__address = address
    self.__base_address = None
    # Convert milliseconds to seconds.
    self._time_step = (float(time_step) / 1000)
def __init__(self):
    """Load the XML data into a shared store and build all UI pages,
    including the XML/SQL save pages."""
    # Shared data store populated from the XML file.
    self.__store = store()
    self.__dataxml = dataxml()
    self.__dataxml.read('new.xml', self.__store)
    self.__datasql = datasql()
    # All pages operate on the same store instance.
    self.materialspage = materialspage(self.__store)
    self.productspage = productspage(self.__store)
    self.sellspage = sellspage(self.__store)
    # Save pages need both backends to export the store.
    self.savexml = savexmlpage(self.__store, self.__dataxml, self.__datasql)
    self.savesql = savesqlpage(self.__store, self.__dataxml, self.__datasql)
def user_input():
    """Read the GUI fields and either store credentials (option 0) or run
    the requested number of exercise sessions.

    accuracy/sessions stay None when their fields are not valid integers,
    which disables the exercise path.
    Fix: the bare ``except:`` is narrowed to ValueError so only the expected
    parse failure is swallowed; real errors now surface.
    """
    username = username_field.get()
    password = password_field.get()
    level = int(level_field.get())
    url = url_field.get()
    option = int(option_field.get())
    accuracy = None
    sessions = None
    try:
        # Parsed together on purpose: if accuracy is invalid, sessions is
        # left unset as well (matches the original combined-try behavior).
        tmp = int(accuracy_field.get())
        accuracy = tmp
        tmp = int(sessions_field.get())
        sessions = tmp
    except ValueError:
        pass
    if option == 0:
        store(level, url, username, password)
    elif accuracy and sessions:
        for i in range(sessions):
            exercise(level, url, username, password, accuracy, option)
def __init__(self, ui, path):
    """Open a read-only repository served over static HTTP at ``path``.

    Reads the requirements file (tolerating its absence), verifies the
    target looks like an hg repository, then wires up the store, manifest,
    and changelog plus the usual lazily-filled caches.
    """
    self._url = path
    self.ui = ui
    self.root = path
    u = util.url(path.rstrip('/') + "/.hg")
    self.path, authinfo = u.authinfo()
    opener = build_opener(ui, authinfo)
    self.opener = opener(self.path)
    self.vfs = self.opener
    self._phasedefaults = []
    self.names = namespaces.namespaces()
    try:
        requirements = scmutil.readrequires(self.vfs, self.supported)
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise
        requirements = set()
        # check if it is a non-empty old-style repository
        try:
            fp = self.vfs("00changelog.i")
            fp.read(1)
            fp.close()
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            # we do not care about empty old-style repositories here
            msg = _("'%s' does not appear to be an hg repository") % path
            raise error.RepoError(msg)
    # setup store
    self.store = store.store(requirements, self.path, opener)
    self.spath = self.store.path
    self.svfs = self.store.opener
    self.sjoin = self.store.join
    self._filecache = {}
    self.requirements = requirements
    self.manifest = manifest.manifest(self.svfs)
    self.changelog = changelog.changelog(self.svfs)
    # Caches below are populated lazily elsewhere.
    self._tags = None
    self.nodetagscache = None
    self._branchcaches = {}
    self._revbranchcache = None
    self.encodepats = None
    self.decodepats = None
    self._transref = None
def put():
    """Key/value PUT endpoint: write the request body under ?k=...

    ?method=replace (default) overwrites; ?method=append appends.
    Returns 403 for an unknown API key, 400 for an unknown method.
    """
    s = store(request.args["api_key"])
    # store() yields None for an unknown API key.
    if s is None:
        return f"Error, key {request.args['api_key']} was invalid", 403
    key, value = request.args["k"], request.data
    value = value.decode("utf-8")
    # Default write mode is replace.
    method = request.args.get("method", "replace")
    if method == "replace":
        s.put(key, value)
    elif method == "append":
        s.append(key, value)
    else:
        return f"Invalid method: {method}", 400
    return f"Success", 200
def interp_ast(toy):
    """Interpret a parsed program ``toy``: seed env/store from the global
    ``ops`` bindings, run, and print the answer or a caught interpreter
    error."""
    env = environment()
    s_store = store()
    for op in ops:
        # op[0] is the identifier, op[1] its initial value.
        env.pushTo(op[0], s_store.alloc())
        s_store.setAt(env.lookUp(op[0], s_store), op[1])
    try:
        answer, aStore = stop_catch_interpretter(toy, env, s_store)
        print(str(answer))
    # Catch all valid cases of exceptions thrown by the program itself
    except (errors.UndeclaredException, errors.PrimopDomainError,
            errors.ArgumentParameterMismatch, errors.ClosureOrPrimopExpected,
            errors.StopContinuation) as e:
        print(str(e))
def process_main(help_function, function_name: str, dataset_name, *args): """ The main function for the process. The function can work by only using this function. @param help_function: the help function, complish main process logic @param function_name: the name of the function @param dataset_name: the dataset need to process @param args: the remain parameters, for help function @return: the json result of the query """ # assert the dataset name is valid dataset_name = assert_dataset(function_name, dataset_name) # get the json file name json_file = function_name + "_" + str(dataset_name) + "_" + "_".join([str(i) for i in args]) # if the query result exist in store directory, get result from store directory if store.exist(json_file): return store.load(json_file) # if the query result doesn't exist in store directory, get result from the help function json_result = help_function(dataset_name, args) # store the json result store.store(json_file, json_result) return json_result
def main():
    """Warehouse console menu: stock-in, stock-out, query, or statistics.

    User-facing strings are Chinese and left unchanged.
    Fix: the menu-choice local was named ``str``, shadowing the builtin;
    renamed to ``choice`` (no interface change).
    """
    # Menu selection
    print("+---------------请选择操作:------------+\n")
    print("1.--------入库--------\n")
    print("2.--------出库--------\n")
    print("3.--------查询--------\n")
    print("4.--------统计--------\n")
    warn.warn()
    choice = int(input("\n请输入:"))
    # Dispatch to the selected feature
    if choice == 1:
        store.store()
    elif choice == 2:
        reduce.reduce()
    elif choice == 3:
        query.query()
    elif choice == 4:
        # Statistics sub-menu: total stock value / monthly outbound value /
        # monthly consumption per part type.
        print("------------统计功能---------\n")
        print("1.----------库元件总价值统计---------------\n")
        print("2.----------每月出库元件总价值统计---------\n")
        print("3.----------各类元件本月消耗量-------------\n")
        first_value = int(input("请选择统计类:"))
        statistics.statistics(first_value)
def store_test():
    """Exercise the store's time-windowed view: getData(t) returns the
    entries still live at time t for a capacity-2 store."""
    s = store(2)
    s.store('a', 0)
    assert s.getData(0) == ['a']
    s.store('b', 1)
    assert s.getData(0) == ['a', 'b']
    s.store('c', 2)
    s.store('d', 3)
    assert s.getData(1) == ['a', 'b', 'c', 'd']
    assert s.getData(2) == ['a', 'b', 'c', 'd']
    # Older entries fall out of the window as the query time advances.
    assert s.getData(3) == ['b', 'c', 'd']
    assert s.getData(4) == ['c', 'd']
    # Re-storing 'a' refreshes it at time 4.
    s.store('a', 4)
    assert s.getData(5) == ['d', 'a']
    assert s.getData(6) == ['a']
    assert s.getData(7) == []
def __init__(self):
    """Wire up the sensor, storage, and comms interfaces and blank the
    current readings."""
    # interface to magnetometer
    self.magnetometer = magnetometer(gauss=4.7, declination=(-2, 5))
    # interface to barometer
    self.barometer = barometer()
    # interface to persistent store
    self.store = store()
    # interface to GPS/GPRS module
    self.sim = sim()
    # current heading
    self.current_heading = None
    # current GPS data (json returned from sim.get_gps())
    self.current_gps = None
    # current barometer data
    self.current_barom = {}
def __init__(self, configFile, jid, password, ssl=False, plugin_config = {}):
    """XMPP bot: load config, open persistent storage when configured, and
    register plugins and commands.

    NOTE(review): the mutable default plugin_config={} is shared across
    calls — safe only if never mutated; confirm.
    """
    self.configFile = configFile
    self.botconfig = self.loadConfig(configFile)
    sleekxmpp.sleekxmpp.xmppclient.__init__(self, jid, password, ssl, plugin_config)
    basebot.__init__(self)
    # Persistent storage is optional; plugins may misbehave without it.
    storageXml = self.botconfig.find('storage')
    if storageXml is not None:
        self.store = store(storageXml.attrib['file'])
    else:
        logging.warning("No storage element found in config file - proceeding with no persistent storage, plugin behaviour may be undefined.")
    self.rooms = {}
    self.botPlugin = {}
    self.pluginConfig = {}
    self.add_event_handler("session_start", self.start, threaded=True)
    self.auto_authorize = False
    self.register_bot_plugins()
    self.registerCommands()
def __init__(self, address="http://127.0.0.1:12000/", time_step=500):
    """Frame client: register in the global frame list and initialize type
    maps, the object store, and change-tracking buffers.

    address -- base server URL (a trailing slash is ensured).
    time_step -- polling period in milliseconds; stored as seconds.
    """
    frame.framelist.append(self)
    self.thread = None
    self.__app = None
    self.__host_typemap = {}
    self.__typemap = {}
    self.__name2type = {}
    self.object_store = store()
    # Normalize the base URL so joined paths are well-formed.
    if not address.endswith('/'):
        address += '/'
    self.__address = address
    self._time_step = (float(time_step) / 1000)
    # Objects created/modified/deleted since the last push.
    self.__new = {}
    self.__mod = {}
    self.__del = {}
    # Types under observation, plus those newly added or modified.
    self.__observed_types = set()
    self.__observed_types_new = set()
    self.__observed_types_mod = set()
def __init__(self, config_file, ssl=False, plugin_config = {}):
    """ Initializes the bot
        config_file -- string pointing to an xml configuration file

        NOTE(review): the ssl parameter is unused below — the config's own
        ssl attribute (default True) is what gets passed to ClientXMPP.
        The mutable default plugin_config={} is shared across calls.
    """
    self.config_file = config_file
    self.botconfig = self.load_config(config_file)
    auth = self.botconfig.find('auth')
    logging.info("Logging in as %s" % auth.attrib['jid'])
    sleekxmpp.ClientXMPP.__init__(self, auth.attrib['jid'], auth.attrib['pass'],
                                  auth.get('ssl', True), plugin_config)
    # Persistent storage is optional; plugins may misbehave without it.
    storageXml = self.botconfig.find('storage')
    if storageXml is not None:
        self.store = store(storageXml.attrib['file'])
    else:
        logging.warning("No storage element found in config file - proceeding with no persistent storage, plugin behaviour may be undefined.")
    self.rooms = {}
    self.add_event_handler("session_start", self.handle_session_start, threaded=True)
    self.register_xmpp_plugins()
    CommandBot.__init__(self)
    PlugBot.__init__(self, default_package = 'sleekbot.plugins')
    self.register_adhocs()
if inst.errno != errno.ENOENT: raise # we do not care about empty old-style repositories here msg = _("'%s' does not appear to be an hg repository") % path raise error.RepoError(msg) requirements = [] # check them for r in requirements: if r not in self.supported: raise error.RepoError(_("requirement '%s' not supported") % r) # setup store def pjoin(a, b): return a + '/' + b self.store = store.store(requirements, self.path, opener, pjoin) self.spath = self.store.path self.sopener = self.store.opener self.sjoin = self.store.join self.manifest = manifest.manifest(self.sopener) self.changelog = changelog.changelog(self.sopener) self.tagscache = None self.nodetagscache = None self.encodepats = None self.decodepats = None def url(self): return self._url def local(self):
if i < len(words): res.append(abd+' '+words[i]) i+=1 else: res.append(words[i]) i += 1 return res def wikipediaTran(): pass if __name__ == "__main__": if len(sys.argv) < 3: print "usage: $translate.py repository [word | fileOfWords]" sys.exit() engine = store.store(googleTran, os.path.abspath(sys.argv[1])) if os.path.isfile(os.path.abspath(sys.argv[2])): words = [l.decode('utf-8') for l in open(os.path.abspath(sys.argv[2]),'r').read().splitlines()] else: words = [w.decode('utf-8') for w in sys.argv[2:]] res = [] for line in words: res.extend(getWords(line)) words = res if len(words) > 10: engine.prepare(words) else: for w in words: print w.encode('utf-8'),engine.get(w.decode('utf-8')).encode('utf-8') engine.save()
def get(self):
    """Fetch the RSS feed for the requested URL, emit it as XML, and
    persist it via store.store."""
    # fetchRSS also returns the updated high-water date for this feed.
    self.rss, self.NEW_DATE = RSSUtil.fetchRSS(self.request.url, self.NEW_DATE)
    self.response.headers['Content-Type'] = 'text/xml'
    store.store(self.rss)
#! /usr/bin/env python import store def op(i): print i return str(int(i)+1) if __name__=="__main__": bla = store.store(op,'repo') for i in range(10): bla.get(str(i)) for i in range(10): bla.get(str(i)) bla.save()
from store import store
from zoom.utils import Record


def make_a_link(item):
    # Render a mapping with 'url' and 'title' keys as an HTML anchor.
    return '<a href="%(url)s">%(title)s</a>' % item


class Flag(Record):
    """A stored record with url/title/icon fields and a computed HTML link."""
    link = property(lambda a: make_a_link(a))

    def __init__(self, *a, **k):
        # Seed default fields before applying caller-supplied values.
        self.update(dict(url='', title='', icon=''))
        Record.__init__(self, *a, **k)


# Module-level store of Flag records.
flags = store(Flag)
def computer(function,**kwargs):
    """
    Compute function figures out how to run a calculation over a simulation.

    Builds a job list (one per simulation slice, with upstream data
    resolved), then runs ``function`` for each job whose result file does
    not yet exist, writing a .dat result plus a .spec attributes file.
    Python 2 code (print statements); this block was rejoined from several
    wrapped source lines.
    """
    work = kwargs['workspace']
    calc = kwargs['calc']
    #---perform a calculation over all collections
    if 'collections' in calc:
        cols = tuple([calc['collections']]) if type(calc['collections'])==str else calc['collections']
        sns = unique(flatten([work.vars['collections'][i] for i in cols]))
    else: sns = work.sns()
    #---get slices (required)
    slice_name = calc['slice_name']
    group = calc['group'] if 'group' in calc else None
    #---pass data to the function according to upstream data type
    incoming_type = calc['uptype']
    jobs,data = [],dict([(sn,{}) for sn in sns])
    combined_slices = []
    for sn in sns:
        new_job = {'sn':sn,'slice_name':slice_name,'group':group}
        if incoming_type == 'simulation':
            #---prepare combinations in a dictionary
            if slice_name not in work.slice(sn):
                raise Exception(
                    '\n[ERROR] the slices yaml file is missing a slice named "%s" for simulation "%s"'%
                    (slice_name,sn))
            try: mfp = work.slice(sn)[slice_name][group]['missing_frame_percent']
            except:
                print "[WARNING] no missing frame percentage here"
                mfp = 0.0
            #---skip slices that lost too many frames upstream
            if mfp>work.missing_frame_tolerance:
                status('upstream slice failure: %s,%s,%s missing_frame_percent=%.1f'%(
                    sn,slice_name,group,mfp),tag='warning')
                continue
            #---defaulting to 'all' group if group is None
            new_job['grofile'] = work.postdir+\
                work.slice(sn)[slice_name][group if group else 'all']['gro']
            #---! xtc must become a flag. recommend 'xtc' becomes work.cursor[1]
            #---defaulting to 'all' group if group is None
            new_job['trajfile'] = work.postdir+work.slice(sn)[slice_name][group if group else 'all']['xtc']
        if 'specs' not in calc: calc['specs'] = ''
        if 'upstream' in calc['specs']:
            #---if no loop on upstream you can use a list
            if type(calc['specs']['upstream'])==list:
                upstream_ask = dict([(key,None) for key in calc['specs']['upstream']])
            elif type(calc['specs']['upstream'])==str:
                upstream_ask = {calc['specs']['upstream']:None}
            else: upstream_ask = calc['specs']['upstream']
            for key,val in upstream_ask.items():
                upspecs = deepcopy(work.calc[key])
                #---identify the list of particular options along with the stubs
                options,stubs = work.interpret_specs(upspecs,return_stubs=True)
                #---identify paths and values over which we "whittle" the total list of specs
                whittles = [(i,j) for i,j in catalog(val)]
                #---if no loop on upstream pickles we interpret none and send blank specs
                if val in ['None','none',None]:
                    specs = [options[ss] for r,v in whittles for ss,s in enumerate(stubs)]
                else:
                    #---select the correct option by matching all catalogued routes from the incoming
                    #---...key to the original calculation
                    specs = [options[ss] for r,v in whittles for ss,s in enumerate(stubs)
                        if delve(s['specs'],*r)==v]
                if len(specs)!=1 and 'loop' not in upspecs['slice_name']:
                    import pdb;pdb.set_trace()
                    #---NOTE(review): 'select' is undefined here — this raise
                    #---would itself NameError if reached; confirm intent.
                    raise Exception('[ERROR] redundant upstream selection %s'%str(select))
                #---if there are multiple slices
                #---! note that we expect that if slice_names is a list it will be ordered here too
                for slicenum,spec in enumerate(specs):
                    #---if the upstream calculation has a group then use it in the filename
                    if not group:
                        if 'group' in work.calc[key]: upgroup = work.calc[key]['group']
                        else: upgroup = None
                    else: upgroup = group
                    if not upgroup:
                        sl = work.slice(sn)[spec['slice_name']]
                        fn_base = re.findall('^v[0-9]+\.[0-9]+-[0-9]+-[0-9]+',
                            work.slice(sn)[upspecs['slice_name']]['all']['filekey']
                            )[0]+'.%s'%key
                    else:
                        sl = work.slice(sn)[spec['slice_name']][upgroup]
                        fn_base = '%s.%s'%(sl['filekey'],key)
                    #---! moved the following block left recently
                    fn = work.select_postdata(fn_base,spec)
                    if not fn:
                        print '[ERROR] missing %s'%fn
                        import pdb;pdb.set_trace()
                    outkey = key if len(specs)==1 else '%s%d'%(key,slicenum)
                    #---before each calculation the master loop loads the filename stored here
                    data[sn][outkey] = os.path.basename(fn)[:-4]+'dat'
            new_job['upstream'] = data[sn].keys()
        jobs.append(new_job)
    #---master loop
    for outgoing in jobs:
        sn,slice_name,group = outgoing['sn'],outgoing['slice_name'],outgoing['group']
        #---if we combine slices for this calculation we use the whole time span in the base filename
        if type(slice_name)==list:
            #---! simple method for making the combination file key
            start = min([work.slice(sn)[s]['all' if not group else group]['start'] for s in slice_name])
            end = max([work.slice(sn)[s]['all' if not group else group]['end'] for s in slice_name])
            #---NOTE(review): 's' here is the leaked loop variable from the
            #---comprehensions above (Python 2 scoping); confirm intent.
            skip = work.slice(sn)[s]['all' if not group else group]['skip']
            #---! this filekey construction means the user will have to anticipate the names of combos
            fn_base = '%s.%d-%d-%d.%s'%(work.prefixer(sn),start,end,skip,function.__name__)
        else:
            #---we index all calculations automatically in case we loop over specs later
            index,fn_key = -1,''
            if not group:
                fn_base = re.findall('^v[0-9]+\.[0-9]+-[0-9]+-[0-9]+',
                    work.slice(sn)[slice_name][
                    'all' if not group else group]['filekey'])[0]+'.%s'%function.__name__
            else:
                try:
                    fn_base = work.slice(sn)[slice_name][
                        'all' if not group else group]['filekey']+'.%s'%function.__name__
                except:
                    print "no group and cannot get base filename"
                    import pdb;pdb.set_trace()
            #---next free index among existing .dat files for this base name
            prev = glob.glob(work.postdir+fn_base+'*.dat')
            if prev == []: index = 0
            else: index = max(map(lambda x:int(re.findall('^.+\/%s\.n([0-9]+)\.dat'%fn_base,x)[0]),prev))+1
            fn_key = '.n%d'%index
        fn = fn_base+fn_key+'.dat'
        #---safety check for file errors to prevent overwriting however this should be handled by indices
        if os.path.isfile(work.postdir+fn): raise Exception('[ERROR] %s exists'%(work.postdir+fn))
        #---check for specs file with the exact same specifications
        exists = True if index != -1 and work.select_postdata(fn_base,calc) != None else False
        if not exists:
            import ipdb;ipdb.set_trace()
            status("%s %s"%(function.__name__,str(outgoing)),tag='compute')
            outgoing['workspace'] = work
            outgoing['calc'] = calc
            #---load upstream results referenced by this job before computing
            if 'upstream' in outgoing:
                sn = outgoing['sn']
                outgoing['upstream'] = dict([(k,
                    load(data[sn][k],work.postdir)) for k in outgoing['upstream']])
            result,attrs = function(**outgoing)
            """
            spec files are carefully constructed
            they prevent redundant calculations
            they allow us to loop over many parameters while saving files with a single index
            the calculation dictionary in the specs file contains meta-parameters for looping
            we are careful not to save meta parameters to the spec file
            we only save parameters which are relevant to the calculation itself
            the calculation dictionary in the spec file must therefore separate these parameters
            in a sub-dictionary called 'specs'
            we prefer attrs to be small and specific
            since attrs is also used to uniquely specify the data
            all big data should be stored as a result via numpy
            """
            #---if any calculation specifications are not in attributes we warn the user here
            if 'specs' in calc: unaccounted = [i for i in calc['specs'] if i not in attrs]
            else: unaccounted = []
            if 'upstream' in unaccounted and 'upstream' not in attrs:
                status('automatically appending upstream data',tag='status')
                unaccounted.remove('upstream')
                attrs['upstream'] = calc['specs']['upstream']
            if any(unaccounted):
                print computer_error_attrs_passthrough+'\n\n'
                status('some calculation specs were not saved: %s'%
                    str(unaccounted),tag='STATUS')
                import pdb;pdb.set_trace()
            #---write the result and its companion .spec attributes file
            store(result,fn,work.postdir,attrs=attrs)
            with open(work.postdir+fn_base+fn_key+'.spec','w') as fp: fp.write(json.dumps(attrs)+'\n')
    #---no modifications to work so no save
    return
def createPlayerAttributes(self):
    """Give the player their starting kit: a wallet with 20, a dev mart
    store, and a lemonade stand."""
    self.wallet=wallet.wallet(20)
    self.devMart=store.store()
    self.stand=lemonadestand.lemonadestand()
# print "sys.argv is: ", sys.argv # # i = 0 # for arg in sys.argv: # print "sys.argv[%d] is %s" % (i, arg) # i += 1 if sys.argv[1] == "get": get.get(sys.argv[2]) elif sys.argv[1] == "init": init.init() elif sys.argv[1] == "list": if len(sys.argv) > 2: list.list(sys.argv[2]) else: list.list(None) elif sys.argv[1] == "restore": restore.restore(sys.argv[2]) elif sys.argv[1] == "store": store.store(sys.argv[2]) elif sys.argv[1] == "test": test.test()
def storeProblem(filename,d,nSensor,nAnchor,sigma,posX,trueDisX,noisyDisX):
    """Persist a generated sensor-localization problem instance.

    Thin pass-through to store.store: dimension d, sensor/anchor counts,
    noise level sigma, positions, and the true/noisy distance matrices are
    forwarded unchanged.
    """
    store.store(filename,d,nSensor,nAnchor,sigma,posX,trueDisX,noisyDisX)
d = c.getresponse() c.close() except: return '' if int(d.status) == 200: results = json.load(d) totalHits = str(results["ysearchresponse"]["totalhits"]) return totalHits else: print "Error:start" print d.status, d.reason print d.read() print "Error:end" return '' engine = store.store(search, os.path.abspath(sys.argv[1])) if __name__ == "__main__": if len(sys.argv) < 3: print "usage: $yahooResults.py repository [word(s)|file]" sys.exit() if os.path.isfile(sys.argv[2]): fName = os.path.abspath(sys.argv[2]) items = [] for line in open(fName, 'r').read().splitlines(): q = '+'.join(sorted(filter(lambda x: x,line.split(' ')))) items.append(q) engine.prepare(items) else: for word in sys.argv[2:]:
placing the <dz:snippet> tag in a document or template. >>> from system import system >>> system.setup_test() >>> for s in snippets.find(name='test'): s.delete() >>> snippets.find(name='test') [] >>> t = snippets.put(Snippet(name='test', body='some text')) >>> snippets.find(name='test') [<Snippet {'name': 'test', 'body': 'some text'}>] """ pass snippets = store(Snippet) def snippet(name, variant=None, default='', markdown=False): snippet = snippets.first(name=name, variant=variant) if snippet: snippet['impressions'] = snippet.get('impressions', 0) + 1 snippets.put(snippet) result = snippet.body else: result = default if markdown: return tools.markdown(result) else: return result
"""
Load quills for the porcupine.
"""

import random
import string

import interrupt
import load
import store

# Instruction generators; each module instance emits one op via genOp().
modules = [
    interrupt.interrupt(),
    load.load(),
    store.store()
]


def render(seed, count):
    """Return ``count`` semicolon-terminated ops, deterministic per seed."""
    # Reseeding makes the op sequence reproducible for a given seed.
    random.seed(seed)
    ops = [random.choice(modules).genOp() for i in range(count)]
    # NOTE(review): string.join is Python 2 only.
    return string.join(ops, ';\n\t') + ';'