def Accrue(self, request, context):
    """Accrue loyalty points to a customer's account at a business.

    Validates that the customer and the business both exist and that the
    customer holds an account with that business; then adds the requested
    points, records a transaction, persists the account, and notifies the
    customer.

    Args:
        request: accrual request carrying customer_id, business_id and
            point_amount.
        context: gRPC call context (unused here).

    Returns:
        ppb.AccrualResponse with success=True on accrual, or success=False
        when any validation step fails.
    """
    cust_id = request.customer_id
    bsn_id = request.business_id
    # One shared failure response for every validation short-circuit below.
    failure = ppb.AccrualResponse(success=False)
    if not util.exists(self.db.users(), cust_id):
        return failure
    if not util.exists(self.db.businesses(), bsn_id):
        return failure
    bsn_name = self.db.businesses().where(id=bsn_id).get().name
    act = self.db.accounts().where(customerid=cust_id).where(
        businessid=bsn_id).get()
    if not act:
        return failure
    self.log.info("Accruing {} points to customer {}", request.point_amount,
                  cust_id)
    act.points += request.point_amount
    # Epoch-seconds timestamp; "%s" is a platform-dependent strftime
    # extension -- NOTE(review): confirm this runs only on platforms that
    # support it.
    trans = data.Transaction(bsn_id, cust_id, request.point_amount,
                             int(datetime.now().strftime("%s")))
    self.db.update_transaction(trans)
    self.db.update_account(act)
    self.notifier.notify(cust_id, (bsn_id, bsn_name, request.point_amount))
    return ppb.AccrualResponse(success=True)
def check_dir(path, force):
    """Ensure *path* exists as a directory, honoring the --force flag.

    With force set, an existing directory is wiped and recreated (after a
    warning). Without force, an existing directory is kept and a warning is
    issued; a missing directory is simply created.
    """
    already_there = exists(path)
    if already_there and force:
        warnings.warn(f'{path} exists and will be overwritten')
        rmtree(path)
        makedirs(path)
    elif not already_there:
        makedirs(path)
    else:
        warnings.warn(
            f'{path} exists, --force not specified, continuing with existing directory'
        )
def test_user_is_created(self):
    """First util.exists() call should create the user's DB directory."""
    # Setup: remove any leftover directory for the mock user.
    if os.path.exists(constants.DB_PATH + self.MOCK_USER):
        shutil.rmtree(constants.DB_PATH + self.MOCK_USER)
    # Validate: the first call reports the user as absent (and presumably
    # creates it as a side effect -- verify against util.exists), so the
    # second call finds it.
    assert not util.exists(self.MOCK_USER)
    assert util.exists(self.MOCK_USER)
    # Cleanup
    if os.path.exists(constants.DB_PATH + self.MOCK_USER):
        shutil.rmtree(constants.DB_PATH + self.MOCK_USER)
def test_mark_finished(self):
    """mark_finished() should drop a '<challenge>.done' marker for the user."""
    # Setup: remove a stale marker and ensure the user record exists
    # (util.exists appears to create it as a side effect -- see sibling tests).
    path = constants.DB_PATH + self.MOCK_USER + "/" + self.MOCK_CHALLENGE + '.done'
    if os.path.exists(path):
        os.remove(path)
    util.exists(self.MOCK_USER)
    # Validate: marker appears only after mark_finished().
    assert not os.path.exists(path)
    util.mark_finished(self.MOCK_USER, self.MOCK_CHALLENGE)
    assert os.path.exists(path)
    # Cleanup
    if os.path.exists(path):
        os.remove(path)
def do_rm(self, arg):
    """Delete *arg*, resolved against the current remote directory."""
    src = os.path.join(self.current, arg)
    # Only issue the delete when the remote entry actually exists.
    if exists(self.up, src):
        Del(self.up, src)
def collect_directory(self):
    """Flatten the image tree under self.image_path.

    Walks the directory recursively, moves every supported image file up
    into self.image_path, records the resulting paths in self.image_list
    (sorted), removes directories that became empty, and reports progress
    on the terminal. Aborts via util.perror when no images are found.
    """
    # Initialize Process Counter
    curr = 0
    # Initialize Image Paths List
    self.image_list = []
    # Congregate Image Files
    for path, subdirs, files in os.walk(str(self.image_path)):
        for file in files:
            # Select Files With Supported File Types
            if (file.endswith((".jpg", ".jpeg", ".png", ".tiff", ".JPG",
                               ".JPEG", ".PNG", ".TIFF"))):
                # Create Local Image Data Map
                image_data = {
                    'dir': util.absolute(path),
                    'name': file,
                    'path': util.form_path([path, file])
                }
                # Move File To Opened Path (only when not already at the top)
                if (self.image_path != image_data['dir']):
                    util.move(
                        image_data['path'],
                        util.absolute(
                            util.form_path(
                                [self.image_path, image_data['name']])))
                # Add File To Image List
                self.image_list.append(
                    util.form_path([self.image_path, image_data['name']]))
                # Update Prompt (carriage return keeps it on one line)
                print("\rLoaded {} Images - {}".format(
                    curr, self.image_path), end="")
                curr += 1
    # Delete Empty Directories left behind by the moves above
    for file in os.listdir(str(self.image_path)):
        # Create Normalized Path
        dir_path = util.form_path([self.image_path, file])
        # Check Directory Existence
        if (util.exists(dir_path)):
            try:
                # Remove Empty Directory (rmdir fails on non-empty ones,
                # which we deliberately ignore)
                os.rmdir(dir_path)
            except OSError:
                pass
    # Update Prompt
    print("\rLoaded All Images - {}".format(self.image_path))
    # Verify Image List Length
    if (not (self.image_list)):
        util.perror("spectra: No images found")
    # Sort Image Paths in Lexicographical Order
    self.image_list.sort()
def test_get_status(self):
    """get_status() should progress Not Started -> Deployed -> Done as the
    user gains a contract address and then finishes the challenge."""
    ns = ("Not Started", "red")
    dep = ("Deployed / Unfinished", "black", self.MOCK_CONTRACT_ADDRESS)
    fin = ("Done!", "green", self.MOCK_CONTRACT_ADDRESS)
    with run.app.app_context():
        # Unknown user: not started.
        stat = util.get_status(self.MOCK_USER, 1)
        self.assertTupleEqual(stat, ns)
        # Creating the user (side effect of util.exists) alone does not
        # change the status.
        util.exists(self.MOCK_USER)
        stat = util.get_status(self.MOCK_USER, 1)
        self.assertTupleEqual(stat, ns)
        # Recording a deployment address moves it to deployed/unfinished.
        util.write_address(self.MOCK_USER, 1, self.MOCK_CONTRACT_ADDRESS)
        stat = util.get_status(self.MOCK_USER, 1)
        self.assertTupleEqual(stat, dep)
        # Marking the challenge finished completes it.
        util.mark_finished(self.MOCK_USER, "01_naive_programmer")
        stat = util.get_status(self.MOCK_USER, 1)
        self.assertTupleEqual(stat, fin)
def get_conferences():
    """Crawl each conference page and its sub-venues, saving the results.

    Python 2 code (print statement). Skips conferences already crawled or
    not present in conf_name.json; for every kept venue, fetches each known
    sub-venue's publication list from dblp.
    """
    files = util.listdir(CONFERENCE_FOLDER)
    util.mkdir(CONFERENCE_CRALWED_FOLDER)
    cnt = 0
    conf = util.load_json('conf_name.json')
    for file_name in files:
        save_path = os.path.join(CONFERENCE_CRALWED_FOLDER, file_name)
        # Already crawled: skip.
        if util.exists(save_path):
            continue
        data = util.load_json(os.path.join(CONFERENCE_FOLDER, file_name))
        # Only crawl venues we have a display name for.
        if data['short'] not in conf.keys():
            continue
        html = util.get_page(data['url'])
        subs = get_subs(data['short'], html)
        data['name'] = conf[data['short']]
        data['sub'] = {}
        for sub in subs:
            if sub not in conf.keys():
                continue
            html = util.get_page('http://dblp.uni-trier.de/db/conf/' + sub)
            data['sub'][sub] = {}
            data['sub'][sub]['pub'] = get_publications(html)
            data['sub'][sub]['name'] = conf[sub]
        cnt += 1
        print cnt, len(files), data['short']
        util.save_json(save_path, data)
def check_csv(file, force):
    """Validate a two-column CSV of (dwi, mask) image paths.

    Each non-blank line must contain exactly two comma-separated existing
    paths. For each valid line, 'dti' and 'harm' output directories are
    created (once per line) next to the first image via check_dir().

    Args:
        file: path of the CSV file to validate.
        force: forwarded to check_dir() (overwrite existing directories).

    Raises:
        FileNotFoundError: when a line does not have exactly two entries,
            or when a referenced image path does not exist. The message
            reports the 1-based line number.
    """
    with open(file) as f:
        content = f.read()
    # BUG FIX: content.split() split on ALL whitespace, so paths containing
    # spaces were torn apart and "line" counted tokens, not lines.
    # splitlines() keeps each physical line intact; start=1 gives human
    # line numbers.
    for line, row in enumerate(content.splitlines(), start=1):
        # Blank lines (e.g. a trailing newline) are not data rows.
        if not row.strip():
            continue
        dwi_mask = [element for element in row.split(',') if element]  # handling w/space
        if len(dwi_mask) != 2:
            raise FileNotFoundError(
                f'Columns don\'t have same number of entries: check line {line} in {file}'
            )
        dirCheckFlag = 1
        for img in dwi_mask:
            if not exists(img):
                raise FileNotFoundError(
                    f'{img} does not exist: check line {line} in {file}')
            elif dirCheckFlag:
                # create DTI and harmonization directory (once per row)
                dtiPath = pjoin(dirname(img), 'dti')
                check_dir(dtiPath, force)
                harmPath = pjoin(dirname(img), 'harm')
                check_dir(harmPath, force)
                dirCheckFlag = 0
def single(title, test=False):
    """Process a single product: download (when *title* is a 36-char uuid),
    unpack, and report the downloaded uuid to the second queue.

    Args:
        title: either a 36-character product uuid (downloaded from DHuS) or
            a product title whose .SAFE directory already lives under TMP.
        test: when True, sets the module-wide TEST flag and skips all work.
    """
    notify("")
    FMT, started = "%Y-%m-%d %H:%M", datetime.datetime.now()
    notify("+++++++++++++++++++++++++++++++++++++++++++++ " +
           started.strftime(FMT))
    global TEST
    TEST = test
    notify(title)
    if util.exists(title):
        print("product already exists...")
        return
    if not TEST:
        if len(title) == 36:
            # download directly from DHuS:
            notify("Downloading...", False)
            uuid = title
            md5sum = util.urlOpen(SITE["CHECKSUM"] % uuid).read()
            zipFull, duration = download(SITE["SAFEZIP"] % uuid, TMP,
                                         title + ".zip", md5sum)
            notify("%s %f" % (md5sum, duration))
            safeDir = unpack(zipFull)
            # send uuid of downloaded product to second queue.
            # BUG FIX: this send previously ran for BOTH branches, raising
            # NameError on `uuid` when title was not a uuid.
            dms.send("SP-iddownloaded", uuid)
        else:
            # append .SAFE to the product title; nothing to download/report
            safeDir = os.path.join(TMP, title + ".SAFE")
    ended = datetime.datetime.now()
    notify("--------------------------------------------- %s %d" %
           (ended.strftime(FMT), (ended - started).total_seconds()))
def fetch_update(API_TOKEN, tag=None, output_dir='/ota-next', cache=False, verbose=True):
    """Fetch the app and lib trees for *tag* into *output_dir*.

    Requires a live network connection; when *tag* is None the repo's
    latest version is used. Writes the tag into output_dir/VERSION and
    returns the list of downloaded paths.
    """
    wlan = get_connection()
    if wlan is None:
        raise RuntimeError('No network connection.')
    if not util.exists(output_dir):
        os.mkdir(output_dir)
    repo = Repo(REPO_URL, api_token=API_TOKEN, username=USERNAME)
    if tag is None:
        tag = repo.latest_version()
    fetched = []
    for subtree in ('/app', '/lib'):
        contents = repo.tag_contents(tag, path=subtree)
        try:
            fetched += repo.download(contents,
                                     root=output_dir + subtree,
                                     cache=cache,
                                     verbose=verbose)
        except Exception:
            # Report which listing failed, then let the caller handle it.
            print('error fetching update: `%s`' % contents)
            raise
    with open(output_dir + '/VERSION', 'w') as output:
        output.write(tag + '\n')
    gc.collect()
    return fetched
def swap(previous, current, next_):
    """Rotate OTA directories: current becomes previous, next_ becomes
    current. The next_ tree is always removed afterwards, even on error."""
    try:
        # Start from an empty `previous` slot.
        if util.exists(previous):
            util.rmtree(previous)
        os.mkdir(previous)
        for entry in ('/app', '/lib', '/VERSION'):
            cur_path = current + entry
            nxt_path = next_ + entry
            if util.exists(cur_path):
                os.rename(cur_path, previous + entry)
            if util.exists(nxt_path):
                os.rename(nxt_path, cur_path)
                print('updated: `%s`' % cur_path)
            gc.collect()
    finally:
        # Never leave a stale staging tree behind.
        if util.exists(next_):
            util.rmtree(next_)
def forward(self, x, context=None, mask=None, context_mask=None, mems=None, return_hiddens=False):
    """Run x through the stack of self-attention ('a'), cross-attention
    ('c') and feed-forward ('f') layers.

    Args:
        x: input activations.
        context: cross-attention context (used by 'c' layers).
        mask: attention mask for x.
        context_mask: mask over the cross-attention context.
        mems: optional per-attention-layer memories; consumed in order,
            one per 'a' layer.
        return_hiddens: when True, also return a LayerIntermediates with
            pre-attention hidden states and attention intermediates.

    Returns:
        The transformed x, or (x, LayerIntermediates) when return_hiddens.
    """
    hiddens = []
    intermediates = []
    prev_attn = None
    prev_cross_attn = None
    # One memory slot per attention layer; copy so pops below don't mutate
    # the caller's list.
    mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers
    for idx, (layer_type, (norm, block, residual_fn)) in enumerate(
            zip(self.layer_types, self.layers)):
        is_last = idx == (len(self.layers) - 1)
        if layer_type == "a":
            # Record the pre-attention hidden state and pop this layer's memory.
            hiddens.append(x)
            layer_mem = mems.pop(0)
        residual = x
        # Pre-norm: normalize before the block; post-norm happens below.
        if self.pre_norm:
            x = norm(x)
        if layer_type == 'a':
            out, inter = block(x,
                               mask=mask,
                               sinusoidal_emb=self.pia_pos_emb,
                               rel_pos=None,
                               prev_attn=prev_attn,
                               mem=layer_mem)
        elif layer_type == 'c':
            out, inter = block(x,
                               context=context,
                               mask=mask,
                               sinusoidal_emb=self.pia_pos_emb,
                               context_mask=context_mask,
                               prev_attn=prev_cross_attn)
        elif layer_type == 'f':
            out = block(x)
        x = residual_fn(out, residual)
        if layer_type in ('a', 'c'):
            intermediates.append(inter)
        # Post-norm variant: normalize after every layer except the last.
        if not self.pre_norm and not is_last:
            x = norm(x)
    if return_hiddens:
        intermediates = LayerIntermediates(
            hiddens=hiddens, attn_intermediates=intermediates)
        return x, intermediates
    return x
def boot():
    """Load (creating defaults if needed) the saved network config, apply
    the DHCP hostname, and join the configured wifi network."""
    from util import exists
    if not exists(filename):
        factory()
    settings = model.load(filename)
    interface.config(dhcp_hostname=settings['dhcp_hostname'])
    # An SSID entry implies credentials are present; connect to it.
    if 'ssid' in settings:
        interface.connect(settings['ssid'], settings['passwd'])
    logger.info(ujson.dumps(get()))
async def handle_stop(self, entity, item, action, data):
    """Stop playback when *item*'s media file exists; 404 otherwise."""
    filepath = self.get_path(item)
    if exists(filepath):
        print('stopping %s' % item)
        await self.media.stop()
        code = 200
    else:
        print('media not found: %s' % filepath)
        code = 404
    return (code, None)
def do_rename(self, arg):
    """Rename <src> to <dst>, both resolved against the current remote
    directory. Silently ignores a missing source."""
    src, dst = arg.split()
    src = os.path.join(self.current, src)
    dst = os.path.join(self.current, dst)
    if exists(self.up, src):
        Rename(self.up, src, dst)
def load():
    '''Load config.json into the module-level CONFIG dict and return it.

    .. versionchanged:: 0.8.1
        Check if config file exists before trying to open it.
    '''
    CONFIG.clear()
    if util.exists('config.json'):
        # BUG FIX: previously this was open('r'), which opened a file
        # literally named "r" instead of the config file itself.
        with open('config.json') as input_:
            CONFIG.update(json.load(input_))
    return CONFIG
def python_which(module_name):
    """
    Look up *module_name* in sys.path and return it if found
    """
    mname = module_name.replace('.', '/')
    for path in sys.path:
        dname = U.pj(path, mname)
        # Preference order: package directory, compiled module, source module.
        for candidate in (dname, dname + ".pyc", dname + ".py"):
            if U.exists(candidate):
                return candidate
    return ''
def stage(relpath, add_to=True):
    """Stage *relpath* into the index, or remove it when add_to is False.

    Index entry structure: (blob_hash, rel_path, mtime, size). Adding a
    file writes its blob object only when that blob is not already stored;
    removing deletes the file from the working directory and drops its
    index entry. The index is kept sorted by path.
    """
    ri = read_index()
    # find the first instance of relpath
    ridx = next((i for i, j in enumerate(ri) if j[1] == relpath), None)
    if add_to:
        bs = wstat(relpath)
        # file is already in the index; it might need a new blob and stats
        if ridx is not None:
            # BUG FIX: the original compared mtime twice
            # (ri[ridx][2] != bs[0] or ri[ridx][2] != bs[0]);
            # compare mtime AND size so size-only changes are detected.
            if ri[ridx][2] != bs[0] or ri[ridx][3] != bs[1]:
                bc = read(relpath)
                bh = hashbuf(bc)
                # if the blob does not already exist, create it; otherwise reuse
                if not exists(bh):
                    objects.write(bh, bc)
                ri[ridx] = (bh, relpath, bs[0], bs[1])
        else:
            bc = read(relpath)
            bh = hashbuf(bc)
            # if the blob does not already exist, create it; otherwise reuse
            if not exists(bh):
                objects.write(bh, bc)
            ri.append((bh, relpath, bs[0], bs[1]))
    # bvc stage rm relpath:
    # remove the file from the working directory and from the index
    else:
        if ridx is not None:
            delete(relpath)
            del ri[ridx]
    # sort the index
    sri = sorted(ri, key=lambda x: x[1])
    write_index(sri)
def boot():
    """Load (creating defaults if needed) the device config and initialise
    every configured PWM output with its default duty cycle."""
    from util import exists
    if not exists(filename):
        factory()
    cfg = model.load(filename)
    for name, entry in cfg.items():
        dev = device(entry['pin'])
        dev.freq(1024)
        dev.duty(entry['default'])
    logger.info(ujson.dumps(cfg))
def __test_register_client(self, new_client):
    """Return an error string when *new_client* may not register, else None.

    A client is rejected when its address is already registered or when the
    configured peer limit has been reached.
    """
    new_address = new_client.address
    if util.exists(self.__clients, lambda x: x.address == new_address):
        return 'duplicated client address: {}'.format(new_address)
    limit = self.__config.max_peer_count
    if limit is not None and len(self.__clients) >= limit:
        return 'exceeding max peer count {}'.format(limit)
    return None
def test_exists(self):
    """util.exists() should lazily create a default DB row for a new
    address: the first call reports it absent (inserting as a side
    effect), the second call then finds it."""
    with run.app.app_context():
        self.assertFalse(util.exists(self.VALID_ADDRESSES[1]))
        conn = sqlite3.connect(constants.DB_PATH)
        cur = conn.cursor()
        resp = cur.execute("SELECT * FROM htctable")
        val = list(resp)[0]
        # Expected default row: id, address, score, then for each of the
        # 8 challenges a (state, deploy address, finished) triple.
        self.assertTupleEqual(val, (
            1,  #userid
            self.VALID_ADDRESSES[1],  #useraddress
            0,  # score
            0,  # c0state
            None,  # c0deployaddr
            0,  # c0finished
            0,  # c1state
            None,  # c1deployaddr
            0,  # c1finished
            0,  #c2state
            None,  # c2deployaddr
            0,  # c2finished
            0,  #c3state
            None,  # c3deployaddr
            0,  # c3finished
            0,  #c4state
            None,  # c4deployaddr
            0,  # c4finished
            0,  # c5state
            None,  # c5deployaddr
            0,  # c5finished
            0,  #c6state
            None,  # c6deployaddr
            0,  # c6finished
            0,  #c7state
            None,  # c7deployaddr
            0  # c7finished
        ))
        conn.close()
        self.assertTrue(util.exists(self.VALID_ADDRESSES[1]))
async def handle_play(self, entity, item, action, data):
    """Start playback of *item*'s media file; 404 when it is missing."""
    print('handling play')
    filepath = self.get_path(item)
    print('file path: %s' % filepath)
    if exists(filepath):
        print('playing %s' % item)
        # Avoid restarting playback when this file is already playing.
        if self.media.playing_file != filepath:
            await self.media.play(filepath)
        code = 200
    else:
        print('media not found: %s' % filepath)
        code = 404
    return (code, None)
def do_get(self, arg):
    """Download <src> to <dst>, resolving a relative remote source against
    the current remote directory and a relative local destination against
    the current working directory."""
    src, dst = arg.split()
    if not exists(self.up, src):
        src = os.path.join(self.current, src)
    if not os.path.exists(dst):
        dst = os.path.join(os.getcwd(), dst)
    Get(self.up, src, dst)
def get_authors():
    """Crawl each author's page and save full name and links (Python 2).

    Skips authors whose crawled file already exists.
    """
    files = util.listdir(AUTHOR_FOLDER)
    util.mkdir(AUTHOR_CRALWED_FOLDER)
    for file_name in files:
        save_path = os.path.join(AUTHOR_CRALWED_FOLDER, file_name)
        # Already crawled: skip.
        if util.exists(save_path):
            continue
        data = util.load_json(os.path.join(AUTHOR_FOLDER, file_name))
        html = util.get_page(data['url'])
        full_name = get_full_name(html)
        data['name'] = full_name
        print data['short'], full_name
        data['links'] = get_links(data['short'], html)
        util.save_json(save_path, data)
def parse(args):
    """Parse and normalise spectra's command-line arguments.

    Expects args[0] to be the image directory, followed by flag/value
    pairs. Returns [image_path, threshold-or-None for each flag in
    SPECTRA_FLAGS]; thresholds are clamped to [0, 100] and the scene
    sensitivity value is inverted. Invalid input aborts via util.perror.
    """
    # Verify Argument List Length
    if (util.empty(args)):
        util.perror("spectra: No arguments found")
    # Verify Directory Existence
    elif (not (util.exists(args[0]))):
        util.perror("spectra: Invalid image path")
    # Verify Argument Count (flags must come in flag/value pairs)
    elif ((len(args) - 1) % 2):
        util.perror("spectra: Invalid argument format")
    # Initialize Formatted Arguments List
    form_args = []
    # Set Directory Argument
    form_args.append(args[0])
    # Check Arguments For Supported Flags
    for FLAG in SPECTRA_FLAGS:
        if (FLAG in args):
            try:
                # Get Threshold Value
                threshold = int(args[args.index(FLAG) + 1])
                # Clamp threshold into [0, 100]. (The old `threshold is
                # None` guard was dead code: int() never returns None.)
                if (threshold < 0):
                    threshold = 0
                elif (threshold > 100):
                    threshold = 100
            except ValueError:
                util.perror("spectra: Unexpected argument data type")
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallowed
                # SystemExit and KeyboardInterrupt.
                util.perror(
                    "spectra: Fatal error while parsing input arguments")
            # Scene sensitivity is expressed inverted.
            if (FLAG == SCENE_SENSITIVITY):
                threshold = 100 - threshold
            # Add As Formatted Argument
            form_args.append(threshold)
        else:
            # Append Empty Argument
            form_args.append(None)
    # Return Formatted Arguments List
    return (form_args)
def pull(conf):
    """Fetch (if necessary) and sync the WebRTC checkout to the branch
    named in conf["branch"]."""
    webrtc_path = util.getpath(config.PATH_WEBRTC)
    util.cd(webrtc_path)
    # A missing .gclient file means the tree was never fetched.
    if not util.exists(webrtc_path, '.gclient'):
        util.exec('fetch', '--nohooks', 'webrtc')
    util.cd(webrtc_path, 'src')
    for cmd in (('git', 'reset', '--hard'),
                ('git', 'fetch', 'origin'),
                ('git', 'checkout',
                 "{}{}".format(config.WEBRTC_BRANCH_PREFIX, conf["branch"])),
                ('gclient', 'sync', '-D')):
        util.exec(*cmd)
def perl_which(module_name):
    """
    Look for and return the path of the perl module
    """
    # Ask perl itself for its include path (@INC), one directory per line.
    output = pexpect.run("perl -E \"say for @INC\"")
    perlmod = module_name.replace("::", "/") + ".pm"
    found = []
    for inc_dir in output.strip().split("\r\n"):
        candidate = U.pj(inc_dir, perlmod)
        if U.exists("%s" % candidate):
            found.append(candidate)
    return "\n".join(found)
def perl_which(module_name):
    """
    Look for and return the path of the perl module
    """
    # Ask perl for its include path; strip coverage warnings from the output.
    raw = th.rm_cov_warn(pexpect.run("perl -E \"say for @INC\""))
    modpath = module_name.replace("::", "/") + ".pm"
    hits = []
    for inc_dir in raw.strip().split("\r\n"):
        candidate = U.pj(inc_dir, modpath)
        if U.exists("%s" % candidate):
            hits.append(candidate)
    return "\n".join(hits)
def _download_in_video_quizzes(course, item):
    """
    Download in-video quizzes.
    """
    path = '{}/video/quizzes/{}.json'
    path = path.format(course.get_folder(), item['item_id'])
    # v2 items use the new quiz endpoint; everything else the old one.
    downloader = (_download_new_quizzes if item['__in_video_quiz_v2']
                  else _download_old_quizzes)
    downloader(course, item, path)
    # Post-process the saved file, if the download produced one.
    if util.exists(path):
        cleaned = util.remove_coursera_bad_formats(util.read_file(path))
        util.write_file(path, cleaned)
def do_put(self, arg):
    """Upload <src> to <dst>, resolving a relative local source against the
    current working directory and a relative remote destination against
    the current remote directory. Silently ignores a missing source."""
    src, dst = arg.split()
    if not os.path.exists(src):
        src = os.path.join(os.getcwd(), src)
    if not os.path.exists(src):
        return
    if not exists(self.up, dst):
        dst = os.path.join(self.current, dst)
    Put(self.up, src, dst)
def dashboard(address):
    """Render the dashboard page for *address*.

    Loads each challenge's JSON metadata and Solidity source, attaches the
    user's status, and marks whether the challenge is deployed (get_status
    returns a 3-tuple once a contract address exists).
    """
    challenges = {}
    for challenge_id in constants.CHALLENGES:
        # FIX: use context managers so file handles are closed promptly
        # (the originals were leaked until garbage collection).
        with open("challenges/" + challenge_id + ".json") as meta_file:
            challenges[challenge_id] = json.loads(meta_file.read().strip())
        with open("challenges/" + challenge_id + ".sol") as code_file:
            challenges[challenge_id]["code"] = code_file.read().strip()
        challenge_id_int = int(challenge_id.split("_")[0])
        challenges[challenge_id]["status"] = util.get_status(
            address, challenge_id_int)
        challenges[challenge_id]["deployed"] = (len(
            challenges[challenge_id]["status"]) == 3)
    return render_template('dashboard.html',
                           address=address,
                           challenge_ids=constants.CHALLENGES,
                           challenges=challenges,
                           exists=util.exists(address))
def make_branch(name):
    """Create branch *name*, rooted at the current HEAD commit."""
    branch_path = os.path.join('.bvc', 'branches', name)
    # Never clobber an existing branch.
    if util.exists(branch_path):
        return
    cc = refs.head()
    # NOTE(review): hh is read but never used afterwards -- candidate for
    # removal once confirmed the read has no required side effect.
    hh = util.read(os.path.join('.bvc', 'HEAD')).strip()
    # Record a commit object marking the branch's creation point.
    body = "%s\t%s\t%s" % (cc[0], time.time(), 'Create branch: ' + name)
    ch = filespace.hashbuf(body)
    objects.write(ch, body)
    # update the ref log
    refs.append(ch)
    # creates the branches file
    util.write(branch_path, ch + os.linesep)
def dashboard():
    """Render the dashboard for the address passed in the query string.

    Returns the literal string "Error" when the address parameter is
    missing or contains a pipe character.
    """
    address = request.args.get("address", None)
    # BUG FIX: calling .strip() on a missing parameter raised
    # AttributeError (None has no strip); reject it explicitly instead.
    if address is None:
        return "Error"
    address = address.strip()
    if "|" in address:
        return "Error"  # todo full validation
    challenges = {}
    for challenge_id in config.challenges:
        # FIX: close the challenge files deterministically.
        with open("challenges/" + challenge_id + ".json") as meta_file:
            challenges[challenge_id] = json.loads(meta_file.read().strip())
        with open("challenges/" + challenge_id + ".sol") as code_file:
            challenges[challenge_id]["code"] = code_file.read().strip()
        challenges[challenge_id]["status"] = util.get_status(
            address, challenge_id)
        challenges[challenge_id]["deployed"] = (len(
            challenges[challenge_id]["status"]) == 3)
    return render_template('dashboard.html',
                           address=address,
                           challenge_ids=config.challenges,
                           challenges=challenges,
                           exists=util.exists(address))
def get_conferences():
    """Crawl conference pages, saving direct publications for conferences
    that have no sub-venues.

    NOTE(review): `cnt` is incremented but never reported here -- it
    presumably mirrors the sibling crawlers that print progress; confirm
    whether a progress print was lost.
    """
    files = util.listdir(CONFERENCE_FOLDER)
    util.mkdir(CONFERENCE_CRALWED_FOLDER)
    cnt = 0
    conf = util.load_json('conf_name.json')
    for file_name in files:
        save_path = os.path.join(CONFERENCE_CRALWED_FOLDER, file_name)
        # Already crawled: skip.
        if util.exists(save_path):
            continue
        data = util.load_json(os.path.join(CONFERENCE_FOLDER, file_name))
        # Only crawl venues with a known display name.
        if data['short'] not in conf.keys():
            continue
        html = util.get_page(data['url'])
        subs = get_subs(data['short'], html)
        data['name'] = conf[data['short']]
        data['sub'] = {}
        # '#' marks publications attached directly to the venue itself.
        if len(subs) == 0:
            data['sub']['#'] = get_publications(html)
        util.save_json(save_path, data)
        cnt += 1
def get_journals():
    """Crawl journal pages, saving direct publications for journals that
    have no sub-venues (Python 2)."""
    files = util.listdir(JOURNAL_FOLDER)
    util.mkdir(JOURNAL_CRALWED_FOLDER)
    cnt = 0
    jour = util.load_json('jour_name.json')
    for file_name in files:
        save_path = os.path.join(JOURNAL_CRALWED_FOLDER, file_name)
        # Already crawled: skip.
        if util.exists(save_path):
            continue
        data = util.load_json(os.path.join(JOURNAL_FOLDER, file_name))
        # Only crawl venues with a known display name.
        if data['short'] not in jour.keys():
            continue
        html = util.get_page(data['url'])
        subs = get_subs(data['short'], html)
        data['name'] = jour[data['short']]
        data['sub'] = {}
        # '#' marks publications attached directly to the venue itself.
        if len(subs) == 0:
            data['sub']['#'] = get_publications(html)
        util.save_json(save_path, data)
        cnt += 1
        print cnt, len(files), data['short']
def get_journals():
    """Crawl each journal page and its sub-venues, saving the results
    (Python 2).

    NOTE(review): unlike the conference crawler, subs are NOT checked for
    membership in `jour` before `jour[sub]` is read -- an unknown sub
    would raise KeyError; confirm get_subs only returns known venues.
    """
    files = util.listdir(JOURNAL_FOLDER)
    util.mkdir(JOURNAL_CRALWED_FOLDER)
    cnt = 0
    jour = util.load_json('jour_name.json')
    for file_name in files:
        save_path = os.path.join(JOURNAL_CRALWED_FOLDER, file_name)
        # Already crawled: skip.
        if util.exists(save_path):
            continue
        data = util.load_json(os.path.join(JOURNAL_FOLDER, file_name))
        # Only crawl venues with a known display name.
        if data['short'] not in jour.keys():
            continue
        html = util.get_page(data['url'])
        subs = get_subs(data['short'], html)
        data['name'] = jour[data['short']]
        data['sub'] = {}
        for sub in subs:
            html = util.get_page('http://dblp.uni-trier.de/db/journals/' + sub)
            data['sub'][sub] = {}
            data['sub'][sub]['pub'] = get_publications(html)
            data['sub'][sub]['name'] = jour[sub]
        cnt += 1
        print cnt, len(files), data['short']
        util.save_json(save_path, data)
def boot(self):
    """Write the factory-default config file if none exists yet."""
    from util import exists
    if not exists(self.filename):
        self.factory()
def do_mkdir(self, arg):
    """Create directory *arg* under the current remote directory, unless
    it already exists."""
    target = os.path.join(self.current, arg)
    if not exists(self.up, target):
        Mkd(self.up, target)