def configitems(self, section, untrusted=False):
    items = self._configitems(section, untrusted=untrusted, abort=True)
    if self.debugflag and not untrusted and self.ucdata:
        uitems = self._configitems(section, untrusted=True, abort=False)
        for k in util.sort(uitems):
            if uitems[k] != items.get(k):
                self.warn(_("Ignoring untrusted configuration option "
                            "%s.%s = %s\n") % (section, k, uitems[k]))
    return util.sort(items.items())
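# For reference, a minimal sketch of the util.sort helper these snippets
# rely on (modeled on Mercurial's historical util.sort, which returned a
# new ascending-sorted list from any iterable; this is an assumption, the
# real util module is not shown here):
def sort(l):
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l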
def load_tests(directories=config.test_directories):
    """Load all tests from 'directories'."""
    for d in directories:
        test_files = util.directory(d, r'[a-z].*\.py$')
        set_files = util.directory(d, r'[A-Z].*\.py$')
        for f in util.sort(test_files) + util.sort(set_files):
            execfile(os.path.join(d, f), globals())
def test_sort(self):
    a = []
    result = util.sort(a)
    self.assertEqual(result, [])

    a = [3, 1, 2]
    result = util.sort(a)
    self.assertEqual(result, [1, 2, 3])

    a = [0, 9, 3]
    result = util.sort(a, "cow")
    self.assertEqual(result, [0, 9, 3])

    a = ['a', 'z', 'j']
    result = util.sort(a, "desc")
    self.assertEqual(result, ['z', 'j', 'a'])
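# A hypothetical two-argument implementation consistent with the
# assertions above (the parameter name "mode" is assumed): the default
# sorts ascending, "desc" sorts descending, and an unrecognized mode
# string such as "cow" leaves the input order untouched.
def sort(a, mode="asc"):
    if mode == "desc":
        return sorted(a, reverse=True)
    if mode == "asc":
        return sorted(a)
    return list(a)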
def hook(ui, repo, name, throw=False, **args):
    r = False
    if _redirect:
        # temporarily redirect stdout to stderr
        oldstdout = os.dup(sys.__stdout__.fileno())
        os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno())
    try:
        for hname, cmd in util.sort(ui.configitems('hooks')):
            if hname.split('.')[0] != name or not cmd:
                continue
            if callable(cmd):
                r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
            elif cmd.startswith('python:'):
                r = _pythonhook(ui, repo, name, hname, cmd[7:].strip(),
                                args, throw) or r
            else:
                r = _exthook(ui, repo, hname, cmd, args, throw) or r
    finally:
        if _redirect:
            os.dup2(oldstdout, sys.__stdout__.fileno())
            os.close(oldstdout)
    return r
def _all_classes(prefix):
    g = copy.copy(globals())
    class_names = [name for name in g if name.startswith(prefix)]
    return [(name, g[name]) for name in util.sort(class_names)]
def iterate():
    if follow and not m.files():
        ff = followfilter(onlyfirst=opts.get('follow_first'))
        def want(rev):
            if ff.match(rev) and rev in wanted:
                return True
            return False
    else:
        def want(rev):
            return rev in wanted

    for i, window in increasing_windows(0, len(revs)):
        yield 'window', revs[0] < revs[-1], revs[-1]
        nrevs = [rev for rev in revs[i:i + window] if want(rev)]
        for rev in util.sort(list(nrevs)):
            fns = fncache.get(rev)
            if not fns:
                def fns_generator():
                    for f in change(rev)[3]:
                        if m(f):
                            yield f
                fns = fns_generator()
            yield 'add', rev, fns
        for rev in nrevs:
            yield 'iter', rev, None
def updatedir(ui, repo, patches, similarity=0):
    """Update dirstate after patch application according to metadata"""
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        gp = patches[f]
        if not gp:
            continue
        if gp.op == "RENAME":
            copies.append((gp.oldpath, gp.path))
            removes[gp.oldpath] = 1
        elif gp.op == "COPY":
            copies.append((gp.oldpath, gp.path))
        elif gp.op == "DELETE":
            removes[gp.path] = 1
    for src, dst in copies:
        repo.copy(src, dst)
    removes = removes.keys()
    if (not similarity) and removes:
        repo.remove(util.sort(removes), True)
    for f in patches:
        gp = patches[f]
        if gp and gp.mode:
            islink, isexec = gp.mode
            dst = repo.wjoin(gp.path)
            # patch won't create empty files
            if gp.op == "ADD" and not os.path.exists(dst):
                flags = (isexec and "x" or "") + (islink and "l" or "")
                repo.wwrite(gp.path, "", flags)
            elif gp.op != "DELETE":
                util.set_flags(dst, islink, isexec)
    cmdutil.addremove(repo, cfiles, similarity=similarity)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    return util.sort(files)
def _picktool(repo, ui, path, binary, symlink):
    def check(tool, pat, symlink, binary):
        tmsg = tool
        if pat:
            tmsg += " specified for " + pat
        if not _findtool(ui, tool):
            if pat: # explicitly requested tool deserves a warning
                ui.warn(_("couldn't find merge tool %s\n") % tmsg)
            else: # configured but non-existing tools are more silent
                ui.note(_("couldn't find merge tool %s\n") % tmsg)
        elif symlink and not _toolbool(ui, tool, "symlink"):
            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
        elif binary and not _toolbool(ui, tool, "binary"):
            ui.warn(_("tool %s can't handle binary\n") % tmsg)
        elif not util.gui() and _toolbool(ui, tool, "gui"):
            ui.warn(_("tool %s requires a GUI\n") % tmsg)
        else:
            return True
        return False

    # HGMERGE takes precedence
    hgmerge = os.environ.get("HGMERGE")
    if hgmerge:
        return (hgmerge, hgmerge)

    # then patterns
    for pat, tool in ui.configitems("merge-patterns"):
        mf = util.matcher(repo.root, "", [pat], [], [])[1]
        if mf(path) and check(tool, pat, symlink, False):
            toolpath = _findtool(ui, tool)
            return (tool, '"' + toolpath + '"')

    # then merge tools
    tools = {}
    for k, v in ui.configitems("merge-tools"):
        t = k.split('.')[0]
        if t not in tools:
            tools[t] = int(_toolstr(ui, t, "priority", "0"))
    names = tools.keys()
    tools = util.sort([(-p, t) for t, p in tools.items()])
    uimerge = ui.config("ui", "merge")
    if uimerge:
        if uimerge not in names:
            return (uimerge, uimerge)
        tools.insert(0, (None, uimerge)) # highest priority
    tools.append((None, "hgmerge")) # the old default, if found
    for p, t in tools:
        if check(t, None, symlink, binary):
            toolpath = _findtool(ui, t)
            return (t, '"' + toolpath + '"')

    # internal merge as last resort
    return (not (symlink or binary) and "internal:merge" or None, None)
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a
    # command that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    m = match(repo, pats, opts)
    if pats:
        modified, added, removed = repo.status(match=m)[:3]
        files = util.sort(modified + added + removed)

        def is_dir(f):
            name = f + '/'
            i = bisect.bisect(files, name)
            return i < len(files) and files[i].startswith(name)

        for f in m.files():
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    if is_dir(f): # deleted directory ?
                        continue
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    if not is_dir(f):
                        raise util.Abort(_("no match under directory %s!")
                                         % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
        m = matchfiles(repo, files)
    try:
        return commitfunc(ui, repo, message, m, opts)
    except ValueError, inst:
        raise util.Abort(str(inst))
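# Standalone sketch of the bisect trick used by is_dir() above: in a
# sorted list of paths, every entry under "dir/" sorts immediately after
# the string "dir/", so a single O(log n) bisect tells whether any exist
# (example paths below are hypothetical).
import bisect
files = sorted(['a.txt', 'dir/x', 'dir/y', 'z.txt'])
name = 'dir' + '/'
i = bisect.bisect(files, name)
assert i < len(files) and files[i].startswith(name)  # 'dir' holds files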
def compare(data1, data2, mode='diff'):
    data1 = precheck(data1)
    data2 = precheck(data2)
    result = []
    if mode == 'diff':
        result = data1.copy()
        for i in data2:
            if i in result:
                result.remove(i)
    else:
        data2 = list(set(data2))
        for i in data2:
            if data1.count(i) != 0:
                result.append(i)
    result = sort(result)
    return result
def walk(self, match):
    fdict = dict.fromkeys(match.files())
    # for dirstate.walk, files=['.'] means "walk the whole tree".
    # follow that here, too
    fdict.pop('.', None)
    for fn in self:
        for ffn in fdict:
            # match if the file is the exact name or a directory
            if ffn == fn or fn.startswith("%s/" % ffn):
                del fdict[ffn]
                break
        if match(fn):
            yield fn
    for fn in util.sort(fdict):
        if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
            yield fn
def _walk(self, relpath, recurse):
    '''yields (unencoded, encoded, size)'''
    path = self.pathjoiner(self.path, relpath)
    striplen = len(self.path) + len(os.sep)
    l = []
    if os.path.isdir(path):
        visit = [path]
        while visit:
            p = visit.pop()
            for f, kind, st in osutil.listdir(p, stat=True):
                fp = self.pathjoiner(p, f)
                if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                    n = util.pconvert(fp[striplen:])
                    l.append((n, n, st.st_size))
                elif kind == stat.S_IFDIR and recurse:
                    visit.append(fp)
    return util.sort(l)
def _findoldnames(fctx, limit):
    "find files that path was copied from, back to linkrev limit"
    old = {}
    seen = {}
    orig = fctx.path()
    visit = [(fctx, 0)]
    while visit:
        fc, depth = visit.pop()
        s = str(fc)
        if s in seen:
            continue
        seen[s] = 1
        if fc.path() != orig and fc.path() not in old:
            old[fc.path()] = (depth, fc.path()) # remember depth
        if fc.rev() < limit and fc.rev() is not None:
            continue
        visit += [(p, depth - 1) for p in fc.parents()]

    # return old names sorted by depth
    return [o[1] for o in util.sort(old.values())]
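# Why storing (depth, fc.path()) pairs yields names "sorted by depth":
# tuples compare element-wise, so sorting orders primarily by the depth
# value (negative, since each parent step subtracts one), with ties
# broken alphabetically by path. Toy values:
old = {'a': (-2, 'a'), 'b': (-1, 'b'), 'c': (-2, 'c')}
assert [o[1] for o in sorted(old.values())] == ['a', 'c', 'b']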
def get_reversed_hosts(value, extensive):
    source1 = source.get_reverse_from_yougetsignal(value, extensive)
    source2 = source.get_reverse_from_logontube(value, extensive)
    domains = []
    error = False
    if source1:
        domains = domains + source1
    else:
        error = True
    if source2:
        domains = domains + source2
    else:
        error = True
    if error:
        logger.warning('[*] One source responded badly: '
                       'Reverse ip lookup may be inaccurate')
    domains = util.remove_duplicates(domains)
    domains = util.sort(domains)
    return domains
def __init__(self, repo, parents, text, files, filectxfn, user=None,
             date=None, extra=None):
    self._repo = repo
    self._rev = None
    self._node = None
    self._text = text
    self._date = date and util.parsedate(date) or util.makedate()
    self._user = user
    parents = [(p or nullid) for p in parents]
    p1, p2 = parents
    self._parents = [changectx(self._repo, p) for p in (p1, p2)]
    files = util.sort(util.unique(files))
    self._status = [files, [], [], [], []]
    self._filectxfn = filectxfn

    self._extra = extra and extra.copy() or {}
    if 'branch' not in self._extra:
        self._extra['branch'] = 'default'
    elif self._extra.get('branch') == '':
        self._extra['branch'] = 'default'
def add(self, manifest, files, desc, transaction, p1=None, p2=None,
        user=None, date=None, extra={}):
    user = user.strip()
    if "\n" in user:
        raise error.RevlogError(_("username %s contains a newline")
                                % repr(user))
    user, desc = util.fromlocal(user), util.fromlocal(desc)

    if date:
        parseddate = "%d %d" % util.parsedate(date)
    else:
        parseddate = "%d %d" % util.makedate()
    if extra and extra.get("branch") in ("default", ""):
        del extra["branch"]
    if extra:
        extra = self.encode_extra(extra)
        parseddate = "%s %s" % (parseddate, extra)
    l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc]
    text = "\n".join(l)
    return self.addrevision(text, transaction, len(self), p1, p2)
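# Shape of the changelog entry assembled by the join above (layout read
# off the code itself; field values are illustrative):
#
#   <manifest hex>
#   <user>
#   <unixtime> <tz offset>[ <escaped extra>]
#   <sorted file 1>
#   ...
#   <sorted file N>
#   <blank line>
#   <description>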
def circularity_filter(cnts, min, max, sorted=False):
    '''
    Filter contours by circularity.
    Circularity formula: e = (4 * PI * area) / (length * length)
    :param cnts: list of contours
    :param min: minimum circularity
    :param max: maximum circularity
    :param sorted: whether to sort the result, defaults to False
    :return: cnt_list
    '''
    eList = []
    newList = []
    for c in cnts:
        length = cv.arcLength(c, True)
        area = cv.contourArea(c)
        if length == 0: # degenerate contour, avoid division by zero
            continue
        e = (4 * np.pi * area) / (length * length)
        if min <= e <= max:
            eList.append(e)
            newList.append(c)
    return newList if not sorted else sort(newList, eList)
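# Sanity check of the circularity formula on an ideal circle of radius r:
# area = pi*r**2 and length = 2*pi*r, so e = (4*pi*area)/length**2 == 1,
# the maximum; elongated shapes score closer to 0.
import numpy as np
r = 3.0
area, length = np.pi * r ** 2, 2 * np.pi * r
assert abs((4 * np.pi * area) / (length * length) - 1.0) < 1e-12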
def list():
    question_file = database_manager.import_questions_for_list()
    titles = ['ID', 'Submission time', 'Title']
    sort = request.args.get('sort')
    search = request.args.get('search')
    if sort: # If sort has a value
        sorted_question_file = util.sort(sort)
        return render_template('list.html', titles=titles,
                               questions=sorted_question_file,
                               page_title='AskMate - List of questions')
    if search:
        search_result = util.search_results(question_file, search)
        return render_template('list.html', titles=titles,
                               questions=search_result,
                               page_title='AskMate - List of questions')
    return render_template('list.html', titles=titles,
                           questions=question_file,
                           page_title='AskMate - List of questions')
def encode_extra(self, d):
    # keys must be sorted to produce a deterministic changelog entry
    items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)]
    return "\0".join(items)
def neighbours(base, max_neig=2, eps_dist=0.09, ncpus=1):
    """
    Look for neighbours as many times as needed until no new neighbours
    are found. This method is not efficient (it checks one more neighbour
    than needed), but it is effective.
      latt_vec: lattice vectors
      base_up: list of basis elements
      nvec: number of repetitions of the lattice vectors needed to get
            all the max_neig neighbours
      eps_dist: distance quantum; anything closer than eps_dist is
                considered to be at the same point
      ncpus: number of cpus used to calculate
    """
    nvec = 1
    base_up = base.elems
    latt_vec = base.latt
    repeat = True
    aux = 0
    while repeat:
        vecs_cells = ut.vec_neig(latt_vec, nvec) # all possible combinations
        dists = np.array([0.])                   # of lattice vectors
        for r in vecs_cells:
            # Matrix of distances (nx3)
            A = np.matrix(np.zeros((len(base_up), 3), dtype=float))
            for i in range(len(base_up)):
                elem = base_up[i]
                A[i, 0] = elem.position[0]
                A[i, 1] = elem.position[1]
                A[i, 2] = elem.position[2]
            # Matrix of distances displaced by vector r (nx3)
            B = np.matrix(np.zeros((len(base_up), 3), dtype=float))
            for i in range(len(base_up)):
                elem = base_up[i]
                B[i, 0] = elem.position[0] + r[0]
                B[i, 1] = elem.position[1] + r[1]
                B[i, 2] = elem.position[2] + r[2]
            dists = np.append(dists, num.dists(A, B))
        # NOTE: if 2 atoms are eps_dist Angstroms away, they are treated as
        # the same atom; considering that the size of an H atom is about
        # 0.5 Angstroms, that may be enough.
        #
        # ordenado contains all the different distances in the problem
        ordenado = ut.sort(dists, eps=eps_dist)

        bonds = [] # list of tuples (vector, neighbour matrix)
        for r in vecs_cells:
            A = np.matrix(np.zeros((len(base_up), 3), dtype=float))
            for i in range(len(base_up)):
                elem = base_up[i]
                A[i, 0] = elem.position[0]
                A[i, 1] = elem.position[1]
                A[i, 2] = elem.position[2]
            B = np.matrix(np.zeros((len(base_up), 3), dtype=float))
            for i in range(len(base_up)):
                elem = base_up[i]
                B[i, 0] = elem.position[0] + r[0]
                B[i, 1] = elem.position[1] + r[1]
                B[i, 2] = elem.position[2] + r[2]
            neig = num.vecin(A, B, ordenado, eps_dist)
            min_neig = neig.min()
            if min_neig <= max_neig:
                bonds.append((r, neig))
        if aux == len(bonds): # no new neighbours, stop looking
            repeat = False
        else:                 # new neighbours, check the next cells
            repeat = True
            nvec += 1
        aux = len(bonds)
    nvec = nvec - 1 # we tried with one more cell and checked that
                    # no new neighbour appeared
    # bonds, and number of repetitions needed to find all max_neig
    return bonds, nvec
def add(self, map, transaction, link, p1=None, p2=None, changed=None):
    # apply the changes collected during the bisect loop to our addlist
    # return a delta suitable for addrevision
    def addlistdelta(addlist, x):
        # start from the bottom up
        # so changes to the offsets don't mess things up.
        i = len(x)
        while i > 0:
            i -= 1
            start = x[i][0]
            end = x[i][1]
            if x[i][2]:
                addlist[start:end] = array.array("c", x[i][2])
            else:
                del addlist[start:end]
        return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
                        for d in x])

    def checkforbidden(l):
        for f in l:
            if "\n" in f or "\r" in f:
                raise error.RevlogError(
                    _("'\\n' and '\\r' disallowed in filenames"))

    # if we're using the listcache, make sure it is valid and
    # parented by the same node we're diffing against
    if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
        files = util.sort(map)
        checkforbidden(files)

        # if this is changed to support newlines in filenames,
        # be sure to check the templates/ dir again (especially *-raw.tmpl)
        hex, flags = revlog.hex, map.flags
        text = ["%s\000%s%s\n" % (f, hex(map[f]), flags(f)) for f in files]
        self.listcache = array.array("c", "".join(text))
        cachedelta = None
    else:
        addlist = self.listcache

        checkforbidden(changed[0])
        # combine the changed lists into one list for sorting
        work = [[x, 0] for x in changed[0]]
        work[len(work):] = [[x, 1] for x in changed[1]]
        work.sort()

        delta = []
        dstart = None
        dend = None
        dline = [""]
        start = 0
        # zero copy representation of addlist as a buffer
        addbuf = buffer(addlist)

        # start with a readonly loop that finds the offset of
        # each line and creates the deltas
        for w in work:
            f = w[0]
            # bs will either be the index of the item or the insert point
            start, end = self._search(addbuf, f, start)
            if w[1] == 0:
                l = "%s\000%s%s\n" % (f, revlog.hex(map[f]), map.flags(f))
            else:
                l = ""
            if start == end and w[1] == 1:
                # item we want to delete was not found, error out
                raise AssertionError(
                    _("failed to remove %s from manifest") % f)
            if dstart is not None and dstart <= start and dend >= start:
                if dend < end:
                    dend = end
                if l:
                    dline.append(l)
            else:
                if dstart is not None:
                    delta.append([dstart, dend, "".join(dline)])
                dstart = start
                dend = end
                dline = [l]

        if dstart is not None:
            delta.append([dstart, dend, "".join(dline)])
        # apply the delta to the addlist, and get a delta for addrevision
        cachedelta = addlistdelta(addlist, delta)

        # the delta is only valid if we've been processing the tip revision
        if self.mapcache[0] != self.tip():
            cachedelta = None
        self.listcache = addlist

    n = self.addrevision(buffer(self.listcache), transaction, link,
                         p1, p2, cachedelta)
    self.mapcache = (n, map)

    return n
def _show(self, ctx, copies, props):
    '''show a single changeset or file revision'''
    changenode = ctx.node()
    rev = ctx.rev()

    if self.ui.quiet:
        self.ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    log = self.repo.changelog
    changes = log.read(changenode)
    date = util.datestr(changes[2])
    extra = changes[5]
    branch = extra.get("branch")

    hexfunc = self.ui.debugflag and hex or short

    parents = [(p, hexfunc(log.node(p)))
               for p in self._meaningful_parentrevs(log, rev)]

    self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)))

    # don't show the default branch name
    if branch != 'default':
        branch = util.tolocal(branch)
        self.ui.write(_("branch:      %s\n") % branch)
    for tag in self.repo.nodetags(changenode):
        self.ui.write(_("tag:         %s\n") % tag)
    for parent in parents:
        self.ui.write(_("parent:      %d:%s\n") % parent)

    if self.ui.debugflag:
        self.ui.write(_("manifest:    %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
    self.ui.write(_("user:        %s\n") % changes[1])
    self.ui.write(_("date:        %s\n") % date)

    if self.ui.debugflag:
        files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
        for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                              files):
            if value:
                self.ui.write("%-12s %s\n" % (key, " ".join(value)))
    elif changes[3] and self.ui.verbose:
        self.ui.write(_("files:       %s\n") % " ".join(changes[3]))
    if copies and self.ui.verbose:
        copies = ['%s (%s)' % c for c in copies]
        self.ui.write(_("copies:      %s\n") % ' '.join(copies))

    if extra and self.ui.debugflag:
        for key, value in util.sort(extra.items()):
            self.ui.write(_("extra:       %s=%s\n")
                          % (key, value.encode('string_escape')))

    description = changes[4].strip()
    if description:
        if self.ui.verbose:
            self.ui.write(_("description:\n"))
            self.ui.write(description)
            self.ui.write("\n\n")
        else:
            self.ui.write(_("summary:     %s\n") %
                          description.splitlines()[0])
    self.ui.write("\n")

    self.showpatch(changenode)
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
    """yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory."""

    if opts is None:
        opts = mdiff.defaultopts
    if not node1:
        node1 = repo.dirstate.parents()[0]

    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return

    date1 = util.datestr(ctx1.date())
    man1 = ctx1.manifest()

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
        for k, v in copy.items():
            copy[v] = k

    gone = {}
    gitmode = {"l": "120000", "x": "100755", "": "100644"}

    for f in util.sort(modified + added + removed):
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git:
            if f in added:
                mode = gitmode[ctx2.flags(f)]
                if f in copy:
                    a = copy[f]
                    omode = gitmode[man1.flags(a)]
                    _addmodehdr(header, omode, mode)
                    if a in removed and a not in gone:
                        op = "rename"
                        gone[a] = 1
                    else:
                        op = "copy"
                    header.append("%s from %s\n" % (op, a))
                    header.append("%s to %s\n" % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append("new file mode %s\n" % mode)
                if util.binary(tn):
                    dodiff = "binary"
            elif f in removed:
                # have we already reported a copy above?
                if f in copy and copy[f] in added and copy[copy[f]] == f:
                    dodiff = False
                else:
                    header.append("deleted file mode %s\n" %
                                  gitmode[man1.flags(f)])
            else:
                omode = gitmode[man1.flags(f)]
                nmode = gitmode[ctx2.flags(f)]
                _addmodehdr(header, omode, nmode)
            if util.binary(to) or util.binary(tn):
                dodiff = "binary"
            r = None
            header.insert(0, mdiff.diffline(r, a, b, opts))
        if dodiff:
            if dodiff == "binary":
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     a, b, r, opts=opts)
            if header and (text or len(header) > 1):
                yield "".join(header)
            if text:
                yield text
def showextras(**args):
    for key, value in util.sort(changes[5].items()):
        args = args.copy()
        args.update(dict(key=key, value=value))
        yield self.t('extra', **args)
try:
    for f, fn in mf.readdelta(n).iteritems():
        if not f:
            err(lr, _("file without name in manifest"))
        elif f != "/dev/null":
            fns = filenodes.setdefault(f, {})
            if fn not in fns:
                fns[fn] = i
except Exception, inst:
    exc(lr, _("reading manifest delta %s") % short(n), inst)

ui.status(_("crosschecking files in changesets and manifests\n"))

if havemf:
    for c, m in util.sort([(c, m) for m in mflinkrevs
                           for c in mflinkrevs[m]]):
        err(c, _("changeset refers to unknown manifest %s") % short(m))
    del mflinkrevs

    for f in util.sort(filelinkrevs):
        if f not in filenodes:
            lr = filelinkrevs[f][0]
            err(lr, _("in changeset but not in manifest"), f)

if havecl:
    for f in util.sort(filenodes):
        if f not in filelinkrevs:
            try:
                fl = repo.file(f)
                lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
            except:
                lr = None
            err(lr, _("in manifest but not in changeset"), f)
"locValid", "age", "ageValid", "alt", "altValid", "course", "courseValid", "speed", "speedValid", "rssi", "snr", "freqError", "sf", "isPacket" ] currentDir = os.path.dirname(os.path.abspath(__file__)) data_file = os.path.abspath( os.path.join(currentDir, '..', 'data', "PACKETS.TXT")) output_file_name = "heatmap_SNR.html" if PLOT_SNR else "heatmap_RSS.html" output_file = os.path.abspath( os.path.join(currentDir, '..', 'result', output_file_name)) grid_size = 25 for_map = pd.read_csv(data_file, sep=',', header=None, names=HEADER) for_map = util.sort(for_map) print(for_map) for_map_gps = for_map.copy() for_map = for_map[for_map.isPacket > 0] util.addDistanceTo(for_map, CENTER) util.addPathLossTo(for_map) print(for_map) for_map.plot.scatter(x='distance', y='pl_db', c='sf', colormap='viridis') plt.show() max_lat = for_map[LAT_SERIES].max()
def predict():
    # input from web
    state = str(request.form['state'])
    bus_name = str(request.form['restaurant'])
    star1 = str(request.form['star1'])
    star2 = str(request.form['star2'])
    star1, star2 = int(star1), int(star2)

    # selecting model to load depending on star selection from web app
    affix = None
    if star1 == 1 and star2 == 2:
        affix = "12"
    if star1 == 1 and star2 == 3:
        affix = "13"
    if star1 == 1 and star2 == 4:
        affix = "14"
    if star1 == 1 and star2 == 5:
        affix = "15"
    if star1 == 2 and star2 == 5:
        affix = "25"
    if star1 == 3 and star2 == 5:
        affix = "35"
    if star1 == 4 and star2 == 5:
        affix = "45"
    if affix:
        with open("../break_week/models/model" + affix + ".pickle",
                  "rb") as f:
            models = pickle.load(f)

    # loading data from mongoDB using restaurant name and state
    db = client.yelp
    df = pd.DataFrame(
        list(db.review.find({"bus_name": bus_name, "state": state})))

    # calling display function
    df_pos, df_neg, name, size, Recall, Precision, Accuracy, lst_neg, \
        lst_pos = display(models, df, star1, star2, state=state,
                          bus_name=bus_name)

    # creating a list of count of words describing each aspect
    neg_asp_lst = list(df_neg["Aspect"])
    pos_asp_lst = list(df_pos["Aspect"])
    neg_words, pos_words = [], []
    for neg, pos in zip(df_neg["Level of experience"],
                        df_pos["Level of experience"]):
        neg_words.append(neg)
        pos_words.append(pos)

    # using created list to make a dict of aspects
    data_neg = {"Aspect": neg_asp_lst, "Level of experience": neg_words}
    data_pos = {"Aspect": pos_asp_lst, "Level of experience": pos_words}

    # using dict to make bar chart
    plot_neg = create_bar_chart(data_neg, "Negative customer experience",
                                "Aspect", "Level of experience")
    plot_pos = create_bar_chart(data_pos, "Positive customer experience",
                                "Aspect", "Level of experience")
    script_neg, div_neg = components(plot_neg)
    script_pos, div_pos = components(plot_pos)

    # five aspects are unpacked below, so both lists need at least five
    # entries (the original check of >= 4 let lst_neg[4]/lst_pos[4]
    # raise an IndexError)
    if len(lst_pos) >= 5 and len(lst_neg) >= 5:
        neg_asp1, neg_asp2, neg_asp3, neg_asp4, neg_asp5 = \
            lst_neg[0][0], lst_neg[1][0], lst_neg[2][0], lst_neg[3][0], \
            lst_neg[4][0]
        neg_desc1, neg_desc2, neg_desc3, neg_desc4, neg_desc5 = \
            sort(lst_neg[0][1]), sort(lst_neg[1][1]), sort(lst_neg[2][1]), \
            sort(lst_neg[3][1]), sort(lst_neg[4][1])
        pos_asp1, pos_asp2, pos_asp3, pos_asp4, pos_asp5 = \
            lst_pos[0][0], lst_pos[1][0], lst_pos[2][0], lst_pos[3][0], \
            lst_pos[4][0]
        pos_desc1, pos_desc2, pos_desc3, pos_desc4, pos_desc5 = \
            sort(lst_pos[0][1]), sort(lst_pos[1][1]), sort(lst_pos[2][1]), \
            sort(lst_pos[3][1]), sort(lst_pos[4][1])
    else:
        return render_template("index.html", title="Home")

    return render_template("predict.html",
                           bus_name=bus_name,
                           neg_the_div=div_neg,
                           neg_the_script=script_neg,
                           pos_the_div=div_pos,
                           pos_the_script=script_pos,
                           neg_asp1=neg_asp1, neg_asp2=neg_asp2,
                           neg_asp3=neg_asp3, neg_asp4=neg_asp4,
                           neg_asp5=neg_asp5,
                           neg_desc1=neg_desc1, neg_desc2=neg_desc2,
                           neg_desc3=neg_desc3, neg_desc4=neg_desc4,
                           neg_desc5=neg_desc5,
                           pos_asp1=pos_asp1, pos_asp2=pos_asp2,
                           pos_asp3=pos_asp3, pos_asp4=pos_asp4,
                           pos_asp5=pos_asp5,
                           pos_desc1=pos_desc1, pos_desc2=pos_desc2,
                           pos_desc3=pos_desc3, pos_desc4=pos_desc4,
                           pos_desc5=pos_desc5)
def _nonoverlap(d1, d2, d3):
    "Return list of elements in d1 not in d2 or d3"
    return util.sort([d for d in d1 if d not in d3 and d not in d2])
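# Usage sketch for _nonoverlap (hypothetical inputs): keeps the d1
# entries that appear in neither d2 nor d3, returned in sorted order.
# _nonoverlap(['b', 'a', 'c'], ['c'], ['x'])  ->  ['a', 'b']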