def A_motif(self, chord, bars, *tweaks):
    """Build the 'A' section motif for one chord.

    BUG FIX: the body reads `self.aI`, `self.aii` and calls
    `self.transpose`, but the signature was missing `self`; it is now a
    proper method.

    Args:
        chord: the arpeggio/chord the motif is built from.
        bars: number of bars to fill.
        *tweaks: string flags that vary the texture (e.g. 'no treble',
            'low triplets', 'crotchet bass', 'extend tie', 'low first').

    Returns:
        dict with 'treble', 'bass1' and 'bass2' staff content.
    """
    motif = {}
    # Treble: rests, or triplets built from different chord degrees.
    if 'no treble' in tweaks:
        motif['treble'] = rep(rest(1), bars)
    elif chord == self.aI:
        motif['treble'] = triplet_bar(pattern(chord, [6, 5]), bars=bars)
    elif chord == self.aii and 'low triplets' in tweaks:
        motif['treble'] = triplet_bar(pattern(chord, [5, 4]), bars=bars)
    else:
        motif['treble'] = triplet_bar(pattern(chord, [6, 4]), bars=bars)
    motif['bass1'] = doublet_bar(pattern(chord, 2, 3), bars=bars)
    if 'crotchet bass' in tweaks:
        # Four crotchets per bar instead of tied whole notes.
        motif['bass2'] = rep(note(select(chord, 1), 4), int(bars * 4))
    else:
        motif['bass2'] = rep(note(select(chord, 1), 1, phrasing="~"), bars)
        if 'extend tie' not in tweaks:
            # Drop the tie on the final whole note so it does not bleed
            # into the next motif.
            motif['bass2'][-1].phrasing = ""
    if 'low first' in tweaks:
        motif['bass1'][0] = chord[0]
        motif['bass2'] = self.transpose(motif['bass2'], -9, "scale")
    return motif
def verify_change_request(account_id, pwd_reset_key, old_pwd, new_pwd):
    """Validate a password-change request and apply it when valid.

    The request is valid only when both the reset key and the old
    password match the values stored in the PwdReset table.

    SECURITY: comparisons use hmac.compare_digest instead of `==` so an
    attacker cannot use response timing to guess the reset key.

    Returns:
        True when the password was changed, False otherwise.
    """
    import hmac  # constant-time comparison for secrets

    valid_request = False
    stored_pwd_reset_key = util.select('PwdReset', ('pwd_reset_key'),
                                       {'account_id': account_id},
                                       classname=PasswordManager.__name__)[0]
    stored_old_pwd = util.select('PwdReset', ('pwd'),
                                 {'account_id': account_id},
                                 classname=PasswordManager.__name__)[0]
    if (hmac.compare_digest(str(stored_pwd_reset_key), str(pwd_reset_key))
            and hmac.compare_digest(str(stored_old_pwd), str(old_pwd))):
        PasswordManager.set_pwd(account_id, new_pwd)
        valid_request = True
    return valid_request
def motif(self, c):
    """Return a two-bar motif (treble + two-voice bass) built from chord c."""
    tones = tonify(c)
    # Treble: an eighth rest followed by sixteenth notes over degrees 3-4-5, twice.
    treble = rep(rest(8) + notes(pattern(tones, 2 * [3, 4, 5]), 16), 2)
    # Bass: an upper tied voice against a sustained lower root, twice.
    upper_voice = rest(16) + tied_note(select(tones, 2), ['8.', 4])
    lower_voice = note(select(tones, 1), 2)
    bass = rep(voices(upper_voice, lower_voice), 2)
    return {'treble': treble, 'bass': bass}
def get_classes(account_id):
    """
    @:parameter account_id of instructor or student account
    @:returns class_ids of Class objects associated with account_id
    """
    account = Account.get_account(account_id)
    # Instructors own classes (Class table); students are enrolled in
    # them (ClassList table). The query is otherwise identical, so the
    # two previously-duplicated branches are merged.
    table = 'Class' if account.account_type == 'instructor' else 'ClassList'
    db_read = util.select(table, ('class_id'), {'account_id': account_id},
                          classname=Class.__name__)
    return [int(r[0]) for r in db_read]
def get_assignment_definition(assignment_def_id):
    """Load an AssignmentDefinition (and its class link) by id.

    Returns:
        AssignmentDefinition populated from the Assignment_definition and
        Class_assignments tables, with assignment_def_id set.
    """
    db_read = util.select(
        'Assignment_definition',
        ('account_id', 'title', 'instructions'),
        {'assignment_def_id': assignment_def_id},
        classname=AssignmentDefinition.__name__
    )
    account_id, title, instructions = db_read[0]
    db_read = util.select(
        'Class_assignments',
        ('class_id'),
        {'assignment_def_id': assignment_def_id},
        classname=AssignmentDefinition.__name__
    )
    class_id = db_read[0][0]
    ad = AssignmentDefinition(account_id, title, instructions, class_id)
    ad.assignment_def_id = assignment_def_id
    # BUG FIX: the constructed object was never returned (function
    # implicitly returned None).
    return ad
def remove(assignment_def_id):
    # Delete the answer key attached to this assignment definition,
    # removing its Field rows first so no orphan fields remain.
    answer_key_id = util.select('Answer_key', ('answer_key_id'),
                                {'assignment_def_id': assignment_def_id})[0][0]
    fields = Field.get_fields(answer_key_id)
    for field in fields:
        field.remove(answer_key_id)
    # Only after the fields are gone, drop the Answer_key row itself.
    util.delete('Answer_key', {'answer_key_id': answer_key_id})
def GET(self):
    """Render a paginated question list (qlist joined with its authors)."""
    params = web.input(limit=20, offset=0)
    rows = list(util.select(
        'qlist JOIN users ON qlist.uid = users.id',
        what='qlist.id AS id, users.name AS name, qlist.opt1 AS opt1, qlist.opt2 AS opt2',
        limit=params.limit,
        order='qlist.id DESC',
        offset=params.offset))
    # An empty page past the first offset means the client paged too far.
    if not rows and params.offset > 0:
        raise status.ApiError('403 Invalid Page')
    raise status.ApiReturn('templates/qlist', rows)
def login():
    # Flask-style login view (Python 2): GET renders the form, POST
    # validates the submitted credentials.
    if request.method == 'GET':
        return render_template('login.html')
    else:
        # POST branch: collect the form fields into a lookup dict.
        username = request.form.get('username')
        passwd = request.form.get('passwd')
        users = {}
        users['username'] = username
        users['passwd'] = passwd
        print users, type(users)  # debug output
        userinfo = select(users)
        print userinfo, type(userinfo)  # debug output
        #return "xyz"
        if userinfo:
            # NOTE(review): plaintext comparison against column index 2 —
            # presumably the stored password; verify against the schema.
            if userinfo[2] == passwd:
                userinfos = chaxun()
                return render_template('index.html', users=userinfos)
            else:
                err_info = "passwd error"
                return render_template('login.html', err_info=err_info)
        else:
            # (commented-out Chinese variant of the message below)
            err_info = "username not exisit"
            return render_template('login.html', err_info=err_info)
def get_answer_key(answer_key_id):
    """Load an AnswerKey by id, including its Field objects."""
    rows = util.select('Answer_key',
                       ('assignment_def_id', 'answer_key_fpath'),
                       {'answer_key_id': answer_key_id},
                       classname='AnswerKey')
    assignment_def_id, answer_key_fpath = rows[0]
    key = AnswerKey(assignment_def_id, answer_key_fpath)
    key.fields = Field.get_fields(answer_key_id)
    return key
def remove_assignments_for_student(account_id):
    """Delete every assignment (and its submission files) for a student.

    Removes submission files per assignment first, then deletes the
    Assignment rows and logs how many were removed.
    """
    db_read = util.select('Assignment', ('assignment_id'),
                          {'account_id': account_id},
                          classname=Assignment.__name__)
    # BUG FIX: util.select returns row tuples; previously the whole
    # tuple (not the id) was passed to remove_files. Unpack r[0] as the
    # other query helpers in this module do.
    assignment_ids = [r[0] for r in db_read]
    for aid in assignment_ids:
        SubmissionFile.remove_files(aid)
    nbr_rows_del = util.delete('Assignment', {'account_id': account_id},
                               classname=Assignment.__name__)
    util.log_info('Deleted {} assignments where account_id={}'.format(
        nbr_rows_del, account_id))
def populate_contactgroups(db_from, db_to, contacts, contactgroups):
    # Migrate contact-group memberships from db_from to db_to, remapping
    # both the group ids and the contact ids to their new values.
    # NOTE(review): `members` is accumulated but never returned or used —
    # confirm whether the caller expects a count back.
    members = 0
    for contactgroup in contactgroups:
        where = "contactgroup_id={}".format(
            contactgroup["prev_contactgroup_id"])
        # Get from db members of an old contact group
        contactgroupmembers = [
            cgm for cgm in util.select(db_from, "contactgroupmembers", where)
        ]
        # Put users from previous contacts groups into new ones
        for contactgroupmember in contactgroupmembers:
            # Point the membership at the newly-created group id.
            contactgroupmember["contactgroup_id"] = contactgroup[
                "contactgroup_id"]
            try:
                # Remap the contact id via the migrated contacts list.
                contactgroupmember["contact_id"] = get_prev_contact_key(
                    contactgroupmember["contact_id"], contacts)
            except Exception, exc:
                logging.exception(
                    "Error updating contact group membership of %d in %s",
                    contactgroupmember["contact_id"],
                    contactgroup["contactgroup_id"])
                raise
            else:
                # Only insert when the remap succeeded.
                util.insert(db_to, "contactgroupmembers", contactgroupmember)
        members += len(contactgroupmembers)
def possibly_centered_sampler(prior, likelihood_fn, proposal, observations,
                              mask, initial_particles=None):
    '''
    Wrap an independent importance sampler to be a masked sampler.

    Args:
        <Same as `IS`>
        mask: A (B, N) Tensor.

    Returns:
        particles: A (S, B, N, dz) Tensor.
        log_weights: A (S, B, N) Tensor.
    '''
    # BUG(review): `resample_jointly` is not a parameter and is not
    # defined in this scope — as written this raises NameError. It is
    # presumably meant to be a keyword argument; confirm against callers
    # before fixing.
    if (not resample_jointly) and (initial_particles is None):
        particles = proposal.sample(1)[0]
        # Broadcast the first particle across the sample dimension so
        # masked-out positions share one common (centered) particle.
        replicated = tf.tile(tf.expand_dims(particles[0], 0),
                             [tf.shape(particles)[0], 1, 1, 1])
        replicated.set_shape(particles.shape)
        mask = tf.expand_dims(mask, -1)  # (B, N, 1)
        # Where mask is set keep the per-sample particle, else the
        # replicated one.
        initial_particles = util.select(mask, particles, replicated)
    return independent_sampler(prior=prior,
                               likelihood_fn=likelihood_fn,
                               proposal=proposal,
                               observations=observations,
                               initial_particles=initial_particles)
def login():
    # Flask-style login view (Python 2) with captcha-code and role
    # handling. GET renders the form; POST validates credentials.
    if request.method == 'GET' :
        # code_str = gene_code()
        # session['code'] = code_str.lower()
        return render_template('login.html')
    else :
        users = request.form.to_dict()
        userinfo = select(users)
        if userinfo :
            print "+++" *10  # debug separator
            print "users:",users,type(users)  # debug output
            if passwd_hash.check_password(userinfo,users) :
                # Compare the submitted captcha (lowercased) with the one
                # stored in the session.
                if users.get('code').lower() == session.get('code') :
                    # del session['code'] -- render_template does not
                    # re-run, so the code cannot be deleted here
                    session['uid'] = userinfo.get('uid')
                    # define a "global" (session-scoped) role
                    # g.role = userinfo['role']
                    session['role'] = userinfo['role']
                    # no re-login needed within 31 days
                    session.permanent = True
                    return redirect(url_for('index'))
                else :
                    err_info = "验证码错误"
                    return render_template('login.html',err_info = err_info)
            else :
                err_info = "passwd error"
                err_info = "密码错误"
                return render_template('login.html',err_info = err_info)
        else:
            err_info = "用户名不存在"
            return render_template('login.html', err_info = err_info)
def remove_all_for_instructor(account_id):
    """Delete every assignment definition owned by this instructor."""
    rows = util.select(
        'Assignment_definition',
        ('assignment_def_id'),
        {'account_id': account_id},
        classname=AssignmentDefinition.__name__
    )
    for row in rows:
        AssignmentDefinition.remove(int(row[0]))
def chords(self):
    """Return the per-bar chord material (1-indexed; bar[0] unused).

    Each entry is either a note list built from arpeggio/seventh-chord
    helpers (with degrees removed via omit/select) or a literal note
    string. Later bars reuse earlier ones via deepcopy so in-place
    edits elsewhere cannot alias.
    """
    bar = [''] * 40
    bar[1] = self.arpeggio('c`', 'e``')
    bar[2] = omit(arpeggio7('c`', 'f``', 'D Minor'), 3, 5)
    bar[3] = omit(dominant7('b', 'f``', 'G Major'), 3, 5)
    bar[4] = deepcopy(bar[1])  # copy, not alias, of bar 1
    bar[5] = omit(arpeggio('c`', 'a``', 'A Minor'), 4)
    bar[6] = ['c`'] + arpeggio('d`', 'd``', 'D Major')
    bar[7] = self.transpose(bar[5], -1)
    bar[8] = ['b'] + self.arpeggio('c`', 'c``')
    bar[9] = omit(arpeggio7('a', 'c``', 'A Minor'), 5)
    bar[10] = select(dominant7('d', 8, 'D Major'), 1, 3, 5, 6, 8)
    bar[11] = arpeggio('g', 5, 'G Major')
    bar[12] = select(diminished7('g', 7, 'G Minor'), 1, 2, 4, 5, 7)
    bar[13] = omit(arpeggio('f', 'd``', 'D Minor'), 4)
    bar[14] = omit(diminished7('f', 'cf``', 'D Minor'), 3, 6)
    bar[15] = self.transpose(bar[13], -1)
    bar[16] = omit(arpeggio7('e', 'f`', 'F Major'), 5)
    bar[17] = omit(arpeggio7('d', 'f`', 'D Minor'), 5)
    bar[18] = omit(dominant7('g,', 'f`', 'G Major'), 2, 4, 7)
    bar[19] = self.arpeggio('c', 'e`')
    bar[20] = omit(self.dominant7('c', 'e`'), 2)
    bar[21] = omit(arpeggio7('f,', 'e`', 'F Major'), 2, 3, 4)
    bar[22] = omit(diminished7('gf,', 'ef`', 'A Major'), 2, 4, 5)
    bar[23] = 'af, f b c` d`'  # literal note string, not a helper call
    bar[24] = omit(dominant7('g,', 'd`', 'G Major'), 2, 3)
    bar[25] = omit(self.arpeggio('g,', 'e`'), 2)
    bar[26] = 'g, d g c` f`'
    bar[27] = omit(dominant7('g,', 'f`', 'G Major'), 2, 4, 7)
    bar[28] = ['g,'] + omit(diminished7('ef', 'gf`', 'A Major'), 2, 5)
    bar[29] = omit(self.arpeggio('g,', 'g`'), 2, 6)
    bar[30] = deepcopy(bar[26])
    bar[31] = deepcopy(bar[27])
    bar[32] = omit(self.dominant7('c,', 'e`'), 2, 3, 4, 6, 9)
    return bar
def position_variation(m, rs):
    # Enumerate every in-room translation of the current position and
    # return slid histories for a (train, test?) split of them.
    u, d, l, r = smp.room(m[0]["field"])
    # All [dy, dx] offsets that keep the piece inside the room bounds.
    ml = [[dy, dx] for dy in range(-u, d + 1) for dx in range(-l, r + 1)]
    n = len(ml)
    # Split offsets into two groups of sizes n-1 and 1 (seeded by rs).
    mtr, mts = utl.select(ml, [n - 1, 1], rs)
    # NOTE(review): the first list iterates over the FULL offset list
    # `ml`, while the commented-out line below suggests it was once the
    # `mtr` split — confirm which is intended before changing.
    return [
        [scp.slide_history(m, x) for x in ml],
        #return [[scp.slide_history(m, x) for x in mtr],
        [scp.slide_history(m, x) for x in mts]
    ]
def create_chords(self):
    """Precompute the chord/arpeggio voicings used by this piece.

    Naming scheme (as used throughout this module): a*/b*/d* prefixes
    are different registers or sections; roman numerals are scale
    degrees; a trailing 7 marks seventh chords; d5 marks a flattened
    fifth. All are derived from the tonic arpeggio via transposition.
    """
    # Tonic arpeggio, six notes — the basis for the other 'a' chords.
    self.aI = self.arpeggio(self.key.root, 6)
    # iii / iii7: shift only the root-letter tones down/up one step.
    self.aiii = [
        self.transpose(t, -1) if letter(t) == self.key.root else t
        for t in self.aI
    ]
    self.aiii7 = [
        self.transpose(t, 1) if letter(t) == self.key.root else t
        for t in self.aI
    ]
    self.aii = self.transpose(self.aI, 1)
    # ii7: lower the first tone of ii, keep the rest.
    self.aii7 = self.transpose(select(self.aii, 1), -1) + subset(
        self.aii, 2, 6)
    # Bass-register I7: low-octave arpeggio joined with a seventh
    # arpeggio starting a ninth above the root.
    self.bI7 = self.arpeggio(self.transpose(self.key.root, -1, 'octave'),
                             4) + self.arpeggio7(
                                 self.transpose(self.key.root, 9), 4)
    self.biii = arpeggio(self.transpose(self.key.root, -8), 4,
                         key=self.IIIt)
    self.biii += arpeggio(self.transpose(self.key.root, 9), 4,
                          key=self.IIIt)
    # biii7: per-note offsets applied against bI7.
    self.biii7 = [
        self.transpose(t, i)
        for t, i in zip(self.bI7, [1, 0, 0, 1, -1, 0, 0, 0])
    ]
    self.bii7 = self.transpose(subset(self.bI7, 1, 4), 1) + self.transpose(
        subset(self.bI7, 5, 7), -1) + select(self.bI7, 8)
    # bii7 with flattened fifth: lower two chord members a semitone.
    self.bii7d5 = [
        self.transpose(t, i, 'semitone')
        for t, i in zip(self.bii7, [0, 0, -1, 0, 0, 0, -1, 0])
    ]
    self.diii = self.transpose(
        self.transpose(self.arpeggio(self.key.root, 3), 2), 1, 'octave')
    self.dI = [self.transpose(self.key.root, 2, 'octave')]
    self.dii = self.transpose(
        [self.key.root, self.transpose(self.IIt.v, -1, "semitone")],
        2, 'octave') + [self.transpose(self.key.v, 2, 'octave')]
def get_all_assignment_definitions(class_id):
    """Return every AssignmentDefinition linked to the given class."""
    rows = util.select(
        'Class_assignments',
        ('assignment_def_id'),
        {'class_id': class_id},
        classname=AssignmentDefinition.__name__
    )
    return [
        AssignmentDefinition.get_assignment_definition(row[0])
        for row in rows
    ]
def add_row(self):
    """Append one dropdown + checkbox-group row above the trailing widget."""
    index = self.rows
    dropdown = bokeh.models.Dropdown(menu=self.menu,
                                     label="Model/observation",
                                     width=150)
    dropdown.on_click(select(dropdown))
    dropdown.on_change('value', self.on_dropdown(index))
    group = bokeh.models.CheckboxButtonGroup(labels=self.labels)
    group.on_change("active", self.on_radio(index))
    self.groups.append(group)
    # Insert before the last child so the trailing control stays last.
    self.column.children.insert(-1, bokeh.layouts.row(dropdown, group))
def GET(self, pid):
    """Render one question (by id) with its paginated answers.

    Raises ApiError for an unknown question id or an out-of-range page;
    otherwise raises ApiReturn with the template and data.
    """
    params = web.input(limit=20, offset=0)
    qx = util.select_one('qlist JOIN users ON qlist.uid = users.id',
                         where='qlist.id=$id', vars={'id': pid})
    if qx is None:
        raise status.ApiError('401 Invalid Question')
    res = list(util.select(
        'alist JOIN qlist ON qlist.id = alist.qid JOIN users ON qlist.uid = users.id',
        where='qid=$id', limit=params.limit, offset=params.offset,
        order='alist.id DESC', vars={'id': pid}))
    if len(res) < 1 and params.offset > 0:
        raise status.ApiError('403 Invalid Page')
    # FIX: the return value was bound to an unused local; keep the call
    # for its session-initialising side effect, drop the dead binding.
    util.get_sess()
    raise status.ApiReturn('templates/question', qx, res)
def get_assignments(assignment_def_id):
    """Return Assignment objects for every row with this definition id."""
    db_read = util.select(
        'Assignments',
        ('assignment_id', 'account_id', 'answer_key_id', 'open_datetime',
         'due_datetime', 'graded', 'grade', 'marked_up_fpath'),
        {'assignment_def_id': assignment_def_id}
    )
    assignments = []
    for r in db_read:
        (assignment_id, account_id, answer_key_id, open_datetime,
         due_datetime, graded, grade, marked_up_fpath) = r
        a = Assignment(account_id, assignment_def_id, answer_key_id,
                       open_datetime, due_datetime)
        a.assignment_id = assignment_id
        # FIX: graded/grade/marked_up_fpath were selected but silently
        # dropped; populate them as get_all_assignments does.
        a.graded, a.grade, a.marked_up_fpath = graded, grade, marked_up_fpath
        assignments.append(a)
    return assignments
def get_class(class_id: int):
    """
    :param class_id: int
    :returns Class object associated with class_id"""
    row = util.select(
        'Class',
        ('account_id', 'class_key', 'title', 'semester', 'start_date',
         'end_date'),
        {'class_id': class_id},
        classname='Class'
    )[0]
    class_obj = Class(*row)
    class_obj.class_id = class_id
    return class_obj
def get_submission_files(assignment_id):
    """Return SubmissionFile objects for every submission of an assignment."""
    values = []
    db_read = util.select(
        'Submission_files',
        ('submission_id', 'submission_fpath', 'submission_datetime'),
        {'assignment_id': assignment_id},
        classname=SubmissionFile.__name__
    )
    for r in db_read:
        (submission_id, submission_fpath, submission_datetime) = r
        sf = SubmissionFile(assignment_id, submission_fpath,
                            submission_datetime)
        # BUG FIX: previously re-assigned sf.assignment_id (already set by
        # the constructor) and discarded the selected submission_id; store
        # the row's primary key instead, mirroring Field.get_fields.
        sf.submission_id = submission_id
        values.append(sf)
    return values
def get_fields(answer_key_id):
    """Return Field objects for every field row of an answer key."""
    rows = util.select(
        'Field',
        ('field_id', 'x', 'y', 'xdim', 'ydim', 'field_type'),
        {'answer_key_id': answer_key_id},
        classname=Field.__name__
    )
    fields = []
    for field_id, x, y, xdim, ydim, field_type in rows:
        field = Field(answer_key_id, x, y, xdim, ydim, field_type)
        field.field_id = field_id
        fields.append(field)
    return fields
def get_all_assignments(account_id):
    """Return every Assignment belonging to the given account."""
    rows = util.select(
        'Assignment',
        ('assignment_id', 'assignment_def_id', 'answer_key_id',
         'open_datetime', 'due_datetime', 'graded', 'grade',
         'marked_up_file_path'),
        {'account_id': account_id},
        classname=Assignment.__name__
    )
    assignments = []
    for (assignment_id, assignment_def_id, answer_key_id, open_dt, due_dt,
         graded, grade, marked_up_file_path) in rows:
        assignment = Assignment(account_id, assignment_def_id,
                                answer_key_id, open_dt, due_dt)
        assignment.assignment_id = assignment_id
        assignment.graded = graded
        assignment.grade = grade
        assignment.marked_up_fpath = marked_up_file_path
        assignments.append(assignment)
    return assignments
def dengl():
    """Login view: GET renders the form, POST validates the credentials."""
    if request.method == 'GET':
        return render_template('denglu.html')
    elif request.method == 'POST':
        username = request.form.get('username')
        passwd = request.form.get('passwd')
        usera = select(username, passwd)
        # select() signals failures with sentinel strings:
        # "pr" = wrong password, "ur" = unknown username.
        if usera == "pr":
            return render_template('denglu.html', a="passwd error")
        elif usera == "ur":
            return render_template('denglu.html', b="username is error")
        else:
            return render_template('log.html', usera=usera)
def propagate_local_histories(self, graph, global_context, local_histories):
    '''
    Args:
        global_context: A (..., dz) Tensor.
        local_histories: A (..., N, dH) Tensor.

    Returns:
        correlated_context: A (..., N, dH) Tensor.
    '''
    propagated = self._trans_gnn(graph=graph,
                                 states=local_histories,
                                 global_states=global_context)
    # Only center nodes take the propagated state; others keep theirs.
    center_mask = tf.expand_dims(graph.center_mask, axis=-1)
    return util.select(center_mask, propagated, local_histories)
def transfer_identities(db_from, db_to, where, user_id):
    """Copy identities matching `where` from db_from into db_to.

    Each copied identity is re-pointed at `user_id`, gets a fresh
    `identity_id` from the destination db, and records its source-db id
    under `prev_identity_id`.
    """
    # Search for identities in database
    identities = [i for i in util.select(db_from, "identities", where)]
    for identity in identities:
        # Update user reference
        identity["user_id"] = user_id
        # Keep track of last id for identity
        prev_identity_id = identity["identity_id"]
        del identity["identity_id"]
        # Insert identity in destination db
        identity["identity_id"] = util.insert(db_to, "identities", identity)
        # FIX: the source id was computed but never stored, unlike
        # transfer_contacts which records prev_contact_id for later
        # remapping.
        identity["prev_identity_id"] = prev_identity_id
    return identities
def transfer_contacts(db_from, db_to, where, user_id):
    """Copy contacts matching `where` from db_from into db_to.

    Each copied contact is re-pointed at `user_id`, gets a fresh
    `contact_id` from the destination db, and keeps its source-db id
    under `prev_contact_id` for later remapping.
    """
    contacts = list(util.select(db_from, "contacts", where))
    for contact in contacts:
        contact["user_id"] = user_id
        previous_id = contact["contact_id"]
        del contact["contact_id"]
        contact["contact_id"] = util.insert(db_to, "contacts", contact)
        contact["prev_contact_id"] = previous_id
    return contacts
def find_subset_pair(n):
    """Count comparable non-overlapping subset pairs of {0..n-1}."""
    elements = list(range(n))
    comparisons = 0
    # Only subset sizes up to half the element count need checking;
    # larger sizes cannot form a disjoint pair.
    for size_index in range(len(elements) // 2):
        subsets = select(size_index + 1, elements)
        total = len(subsets)
        for a in range(total - 1):
            first = subsets[a]
            first.sort()
            for b in range(a + 1, total):
                second = subsets[b]
                if check_overlap(first, second):
                    continue
                second.sort()
                if check_pair_to_cmp(first, second):
                    comparisons += 1
    return comparisons
def remove(account_id):
    # Cascade-delete an account and everything that hangs off it. The
    # order matters: dependent rows (assignments, class lists, classes,
    # definitions) are removed before the Account row itself.
    account = Account.get_account(account_id)
    # Any pending password-reset entries go first.
    util.delete('Pwd_reset', {'account_id': account_id},
                classname=Account.__name__)
    if account.account_type == 'instructor':
        # An instructor owns classes: for each class, remove every
        # enrolled student's assignments, then the class list and the
        # class, and finally the instructor's assignment definitions.
        db_read = util.select('Class', ('class_id'),
                              {'account_id': account_id},
                              classname=Account.__name__)
        class_ids = [r[0] for r in db_read]
        for cid in class_ids:
            student_account_ids = ClassList.get_student_account_ids(cid)
            for sid in student_account_ids:
                Assignment.remove_assignments_for_student(sid)
            ClassList.remove_class(cid)
            Class.remove(cid)
        AssignmentDefinition.remove_all_for_instructor(account_id)
    else:
        # A student: drop enrollment, then each of their assignments.
        ClassList.remove_student_from_class(account_id)
        assignments = Assignment.get_all_assignments(account_id)
        for a in assignments:
            a.remove_assignments_for_student(account_id)
    util.delete('Account', {'account_id': account_id},
                classname=Account.__name__)
def __init__(self):
    """Build the -/mode/+ stepper row: two buttons around a mode dropdown."""
    self.plus = bokeh.models.Button(label="+", width=80)
    self.minus = bokeh.models.Button(label="-", width=80)
    self.modes = bokeh.models.Dropdown(
        label="Time step",
        menu=[("Time step", "Time step"), ("Model run", "Model run")],
        width=80)
    self.plus.on_click(self.on_plus)
    self.minus.on_click(self.on_minus)
    # select() keeps the dropdown label in sync; on_mode reacts to it.
    self.modes.on_click(select(self.modes))
    self.modes.on_click(self.on_mode)
    columns = [
        bokeh.layouts.column(widget, width=width, sizing_mode="fixed")
        for widget, width in ((self.minus, 90),
                              (self.modes, 100),
                              (self.plus, 90))
    ]
    self.layout = bokeh.layouts.row(*columns, width=300)
    super().__init__()
def GET(self, rid):
    """Render the detail page for one room, its house, and its comments."""
    room = util.select_one('rooms', where='id=$rid', vars={'rid': rid})
    house = util.select_one('houses', where='id=$hid',
                            vars={'hid': room.house_id})
    comments = util.select('room_com', where='room_id=$rid',
                           vars={'rid': rid})
    raise status.ApiReturn('templates/room_detail', house, room,
                           list(comments))
def GET(self, hid):
    """Render the detail page for one house, its comments, and its rooms."""
    house = util.select_one('houses', where='id=$hid', vars={'hid': hid})
    comments = util.select('house_com', where='house_id=$hid',
                           vars={'hid': hid})
    rooms = util.select('rooms', where='house_id=$hid', vars={'hid': hid},
                        order='room_no ASC')
    raise status.ApiReturn('templates/house_detail', house, list(comments),
                           list(rooms))
def GET(self):
    """Render the full house listing."""
    raise status.ApiReturn('templates/house_list', util.select('houses'))
def do_analyze(self, args_str):
    """Run the stock analyzer over selected (or all) loaded stocks.

    Parses CLI-style options from args_str, lets the user pick an
    analyzing scheme, runs the Analyzer, prints the good-stock table,
    and optionally writes an HTML report with plots.
    """
    parser = self._get_arg_parser()
    # FIX: a stray trailing comma previously turned this call's result
    # into a throwaway tuple.
    parser.add_argument("-o", "--output", metavar="FILE", dest="output",
                        help="specific output dir or file")
    parser.add_argument("-t", "--threads", type=int, dest="threads",
                        default=multiprocessing.cpu_count(),
                        help="threads number to work [default equal cpu count]")
    parser.add_argument("--plot-all", action="store_true", dest="plot_all",
                        default=False,
                        help="plot all stocks, not only good ones")
    parser.add_argument('codes', nargs='*')
    options = self._parse_arg(parser, args_str)
    if not options:
        return
    # Let the user pick one of the configured analyzing schemes.
    schemes = []
    user_options = []
    for v in self.config['analyzing']['schemes'].values():
        schemes.append(v)
        user_options.append(v['desc'])
    # FIX: the chosen index was previously bound to `select`, shadowing
    # the helper of the same name used elsewhere.
    scheme_idx = util.select(user_options,
                             'please select a scheme used for analyzing')
    config = schemes[scheme_idx]['config']
    logging.info('analyzer config:\n%s' % yaml.dump(config))
    if not self.loaded:
        self.do_load()
    # Restrict to explicitly requested codes, or analyze everything.
    stocks = {}
    if len(options.codes):
        for code in options.codes:
            if code in self.dm.stocks:
                stocks[code] = self.dm.stocks[code]
            else:
                logging.error('unknown stock %s', code)
    else:
        stocks = self.dm.stocks
    if not len(stocks):
        logging.error('no stocks found in local database, please run \'load\' command first')
        return
    analyzer = Analyzer(stocks, self.dm.indexes, config)
    logging.info('all %d available stocks will be analyzed'
                 % len(analyzer.stocks))
    logging.info('-----------invoking data analyzer module-------------')
    analyzer.analyze(threads=options.threads)
    logging.info('-------------------analyze done----------------------')
    # FIX: renamed from `list`, which shadowed the builtin.
    rows = []
    for result in analyzer.good_stocks:
        stock = result.stock
        rows.append({'code': stock.code,
                     'name': stock.name,
                     'price': stock.price,
                     'pe': stock.pe,
                     'nmc': stock.nmc / 10000,
                     'mktcap': stock.mktcap / 10000,
                     'toavgd5': '%.2f%%' % stock.get_turnover_avg(5),
                     'toavgd30': '%.2f%%' % stock.get_turnover_avg(30),
                     'area': stock.area,
                     'industry': stock.industry
                     })
    df = DataFrame(rows)
    if df.empty:
        logging.info('no good stocks found')
        return
    logging.info('list of good %d stocks%s:'
                 % (len(analyzer.good_stocks),
                    options.output
                    and ' and save plots to %s' % options.output or ''))
    print(df.to_string(columns=('code', 'name', 'price', 'pe', 'nmc',
                                'mktcap', 'toavgd5', 'toavgd30', 'area',
                                'industry')))
    logging.info('global market status: %s' % analyzer.global_status)
    if options.output:
        logging.info('generating html report...')
        os.makedirs(options.output, exist_ok=True)
        analyzer.generate_report(options.output,
                                 only_plot_good=not options.plot_all)
        logging.info('done')
# loads config file from config import auth # login or report error try: client = SoundcloudClient(auth) except Abort as a: print(a.msg()) sys.exit(1) def ppr(s): """pretty result of executing the soundcloud API request s""" pp(client.r(s)) group = client.my_groups()[0] pp(select(group, ["name","id"])) t = "artwork_url,bpm,comment_count,genre,id,playback_count,tag_list,title" t = t.split(',') for track in client.pending(group): try: pp(select(track,t)) client.download(track, "pending") except Abort as a: print(a.msg()) print(a.cause()) print("skipping track %s" % track['title'])