def fetch_users():
    """Sync the user table with the Erdos user list.

    Pages through the Erdos users API, collects handles not already in the
    local db, and bulk-inserts them into both the local and remote databases.
    Relies on module-level db, cursor, conn, remote_cursor, remote_conn.
    """
    # Query text was redacted in this copy; presumably selects existing
    # erd handles from the user table -- TODO confirm against original.
    sql_user = "******"
    result = db.read(sql_user, cursor)
    user_list = {} #using dict for faster lookup
    for i in result:
        user_list[i[0]]=0
    new_user = []
    done = 0
    total = 1
    page = 1
    while ( done < total ):
        user_url = "http://erdos.sdslabs.co/users.json?page=" + str(page)
        user_r = requests.get(user_url)
        if(user_r.status_code != 200 ):
            print user_r.status_code, " returned from ", user_url
        else:
            total = user_r.json()['TOTAL']
            done += 500  # assumes 500 users per API page -- TODO confirm
            user_res = user_r.json()['list']
            for i in user_res:
                if(i['username'] not in user_list):
                    new_user.append(i['username'])
            page += 1
    if new_user:
        # print len(new_user)
        # NOTE(review): SQL built by concatenation -- injection-prone if
        # usernames can contain quotes; prefer parameterized inserts.
        sql = "INSERT INTO user (erd_handle, cfs_handle) VALUES "
        for i in new_user:
            sql+="(\'erd"+str(i)+"\',\'cfs\'), "
        sql=sql[:-2]  # drop trailing ", "
        print sql
        db.write(sql, cursor, conn)
        db.write(sql, remote_cursor, remote_conn)
def atkin(number: int, x: int):
    """Toggle Sieve-of-Atkin candidate flags up to *number* and persist them.

    x == 1 -> first worker (odd x values, writes 'one_thr.txt');
    x == 0 -> second worker (even x values, writes 'sec_thr.txt').
    Prints progress (current outer x) roughly every 3 seconds.
    """
    if x == 1:
        path = 'one_thr.txt'
        status = 'Первый процесс окончен'  # "first process finished"
        i = 1
    elif x == 0:
        path = "sec_thr.txt"
        status = 'Второй процесс окончен'  # "second process finished"
        i = 2
    # NOTE(review): any other x leaves path/status/i unbound -> NameError below.
    m_list = [False for _ in range(number + 1)]
    t = time.time()
    t = int(t)
    # The loop variable shadows the parameter x; step 2 splits the x range
    # between the two worker processes (odd vs even start).
    for x in range(i, int(math.sqrt(number)) + 1, 2):
        for y in range(1, int(math.sqrt(number)) + 1):
            # Standard Atkin quadratic forms with their mod-12 residue tests.
            n = 4 * x**2 + y**2
            if n <= number and (n % 12 == 1 or n % 12 == 5):
                m_list[n] = not m_list[n]
            n = 3 * x**2 + y**2
            if n <= number and n % 12 == 7:
                m_list[n] = not m_list[n]
            n = 3 * x**2 - y**2
            if x > y and n <= number and n % 12 == 11:
                m_list[n] = not m_list[n]
        # Throttled progress output: at most every ~3 seconds.
        if int(time.time()) - t > 3:
            print(x)
            t = time.time()
            t = int(t)
    db.write(m_list, path)
    print(status)
def fetch_users():
    """Sync the user table with the Erdos user list (base_url variant).

    Same flow as the other fetch_users: page through the users API, collect
    unknown handles, bulk-insert them locally and remotely.
    """
    # Query text redacted in this copy -- TODO confirm against original.
    sql_user = "******"
    result = db.read(sql_user, cursor)
    user_list = {} #using dict for faster lookup
    for i in result:
        user_list[i[0]] = 0
    new_user = []
    done = 0
    total = 1
    page = 1
    while (done < total):
        user_url = base_url + "/users.json?page=" + str(page)
        user_r = requests.get(user_url)
        if (user_r.status_code != 200):
            print user_r.status_code, " returned from ", user_url
        else:
            total = user_r.json()['TOTAL']
            done += 500  # assumes 500 users per API page -- TODO confirm
            user_res = user_r.json()['list']
            for i in user_res:
                if (i['username'] not in user_list):
                    new_user.append(i['username'])
            page += 1
    if new_user:
        # print len(new_user)
        # NOTE(review): string-built SQL; injection-prone -- parameterize if possible.
        sql = "INSERT INTO user (erd_handle, cfs_handle) VALUES "
        for i in new_user:
            sql += "(\'erd" + str(i) + "\',\'cfs\'), "
        sql = sql[:-2]  # drop trailing ", "
        print sql
        db.write(sql, cursor, conn)
        db.write(sql, remote_cursor, remote_conn)
def setprogram(self, program, message = ""):
    """Load the program row for (this module, *program*) into self.program.

    For repeating programs, also marks this program as the selected one in
    the programs table. Fires onprogramchanged() and returns True.
    Raises IndexError if no matching row exists (programs[0]).
    """
    sql = """SELECT program_id, code, name, relative_times, repeat_program, default_program, selected
        FROM programs
        WHERE module = {0}
        AND code = {1}""".format(db.dbstr(self.name()), db.dbstr(program))
    programs = db.read(sql, 0, 1)
    # Cache the row under stable ProgramCodes keys; START_TIME/MESSAGE are
    # runtime-only fields, not columns.
    self.program = {
        ProgramCodes.PROGRAM_ID:programs[0]['program_id'],
        ProgramCodes.CODE:programs[0]['code'],
        ProgramCodes.NAME:programs[0]['name'],
        ProgramCodes.RELATIVE_TIMES:programs[0]['relative_times'],
        ProgramCodes.REPEAT_PROGRAM:programs[0]['repeat_program'],
        ProgramCodes.DEFAULT_PROGRAM:programs[0]['default_program'],
        ProgramCodes.SELECTED:programs[0]['selected'],
        ProgramCodes.START_TIME:datetime.datetime.now(),
        ProgramCodes.MESSAGE:message
    }
    if self.program[ProgramCodes.REPEAT_PROGRAM]:
        self.debug("Setting program {0} as selected program".format(self.program[ProgramCodes.CODE]))
        # Clear the previous selection for this module, then select this one.
        db.write("UPDATE programs SET selected = 0 WHERE module = {0} AND selected = 1".format(db.dbstr(self.name())))
        db.write("UPDATE programs SET selected = 1 WHERE program_id = {0}".format(self.program[ProgramCodes.PROGRAM_ID]))
    self.onprogramchanged()
    return True
def readNfc(action):
    """Handle one NFC menu action.

    action 49/50/51 are the ASCII codes for keys '1'/'2'/'3' -- TODO confirm:
    49 = arrival check-in, 50 = departure check-out, 51 = register new card.
    """
    # Option 1: Incomming
    if (action == 49):
        print("Arrival:")
        print("Wave your card!")
        cardId = read()
        # Check if card ID is in db
        # NOTE(review): SQL built by concatenation from the card id --
        # injection-prone; parameterize if the db layer allows it.
        db2 = connect()
        c = db2.cursor()
        check = c.execute("SELECT *FROM cards WHERE tagId = " + cardId)
        time.sleep(1)
        # If card ID in db approve and authorize entry
        # NOTE(review): with sqlite3, cursor.execute() returns the cursor
        # (always truthy); this check only works if the driver returns a
        # row count (e.g. MySQLdb) -- verify the db driver.
        if check:
            name = db.write(cardId, Actions.incomming)
            print "You can come in!"
        else:
            print "Nope, you are not one of us! Ciao!"
        logging.info("%s - Arrive", cardId)
    # Option 2 - Outcomming
    if (action == 50):
        print("Going home:")
        print("Wave your card!")
        cardId = read()
        # Check if card ID is in db
        db2 = connect()
        c = db2.cursor()
        check = c.execute("SELECT *FROM cards WHERE tagId = " + cardId)
        time.sleep(1)
        # If card ID isin db approve and authorize leave
        if check:
            name = db.write(cardId, Actions.outcomming)
            print "Yup, you can leave."
        else:
            print "How did you get in here? Calling security!"
        logging.info("%s - Leaving", cardId)
    # Option 3 - Add new card user
    if (action == 51):
        print("What is your name:")
        userId = raw_input('--> ')
        print("Permision level (1:Master key | 2:Guest)?")
        permission = raw_input('-->')
        print("Wave your card!")
        cardId = read()
        name = db.writeUser(userId, cardId, permission)
        print(name)
        logging.info("Created new card user: %s, Permission: %s", userId, permission)
def _update_chat_status(self, chat_status):
    """Persist *chat_status* for this recipient.

    Fix: the original statement hard-coded ``chat_status = 10`` and silently
    ignored the ``chat_status`` argument; the value is now passed as a bound
    query parameter alongside the recipient id.
    """
    sql = ("""
        UPDATE `recipient`
        SET chat_status = %s
        WHERE id = %s
    """)
    # Parameterized write -- consistent with _update_peer_id / _log_chat_history_db.
    db.write(sql, params=(chat_status, self.recipient_id))
def on_success(self, data):
    """Streamer callback: reply to the mentioning tweet and record the reply.

    *data* is a tweet payload (dict) containing the author under
    data['user']['screen_name'] and the tweet id under data['id'].
    """
    name = data['user']['screen_name']
    last_tweet_id = data['id']
    status= "@" + name + " I need to think up of something!"
    tweet = tweets.tweetout(status, last_tweet_id=last_tweet_id)
    if tweet:
        # NOTE(review): "values (?), now() ;" looks malformed -- it would
        # insert two rows ((?) and now()) only if the table has exactly one
        # column; verify against the retweets schema.
        sql = "INSERT into retweets values (?), now() ;"
        db.write(db.con, sql, tweet['id'])
def add(user):
    """Insert *user* as a new row in the users table.

    Duplicate records are ignored on purpose: the IntegrityError raised by
    the uniqueness constraint is swallowed so repeated adds are no-ops.
    """
    insert_stmt = "INSERT INTO users VALUES (?, ?, ?, ?);"
    try:
        db.write(db.con, insert_stmt, user)
    except sqlite3.IntegrityError:
        # Record already exists -- nothing to do.
        pass
def _update_peer_id(self, recipient):
    """ Update peer_id of specified recipient in the db.

    *recipient* is a mapping with at least 'peer_id' and 'id' keys.
    Uses bound parameters (no string-built SQL).
    """
    sql = ("""
        UPDATE `recipient`
        SET peer_id = %s
        WHERE id = %s
    """)
    db.write(sql, params=(recipient['peer_id'], recipient['id']))
def __changeStatus(self, new_status, **kwargs):
    """Set the status of one of the caller's datasets to *new_status*.

    Filters datasets by **kwargs (always scoped to the current owner),
    lets printDatasets pick/confirm one, then updates its row.
    """
    kwargs['owner'] = utils.getOwner()  # never operate on others' datasets
    datasets = self.getDataset(**kwargs)
    dataset = self.printDatasets(datasets)
    dataset_id = dataset['dataset_id']
    sql = 'UPDATE analysis_dataset SET status = %s WHERE dataset_id = %s'
    # r'\x' prefixes the hex string for the db layer (looks like postgres
    # bytea hex format) -- TODO confirm against the db module.
    db.write(sql, (new_status, r'\x' + dataset_id))
def run():
    """Poll the DS18B20 temperature sensor every 10 s and log readings to the db.

    Runs until interrupted with Ctrl-C.
    """
    print "starting temp service..."
    try:
        while True:
            # '28-0417504372ff' is the sensor's 1-Wire device id.
            t = ds18b20.gettemp('28-0417504372ff')
            print t
            db.write(t)
            sleep(10)
    except KeyboardInterrupt, e:
        logging.info("Starting temp stopping...")
def update_user_score():
    """Recompute cfs_score for all users from their solved Codeforces problems.

    NOTE(review): this is a module-level function but it references
    ``self.cfs_max_score`` -- calling it raises NameError. The max-score
    value needs to come from a module constant or parameter; fix at source.
    """
    sql = "UPDATE user SET cfs_score = \
    (SELECT SUM(points/GREATEST("+self.cfs_max_score+", (SELECT MAX(P.points) FROM problem P \
    WHERE P.contestId = contestId))) FROM problem WHERE pid IN \
    (SELECT DISTINCT(pid) FROM activity WHERE uid = user.uid AND MID(pid,1,3)=\'cfs\' AND status = 1)\
    AND points>0)"
    print sql
    db.write(sql, cursor, conn)
def deleteDataset(self, **kwargs):
    '''to delete empty (prepared status) dataset

    Scoped to the current owner; only datasets still in 'prepared' status
    are eligible. printDatasets picks/confirms the one to delete.
    '''
    kwargs['owner'] = utils.getOwner()
    kwargs['status'] = ['prepared']  # safety: never delete populated datasets
    datasets = self.getDataset(**kwargs)
    dataset = self.printDatasets(datasets)
    dataset_id = dataset['dataset_id']
    sql = 'DELETE FROM analysis_dataset WHERE dataset_id = %s'
    # r'\x' prefixes the hex id for the db layer -- TODO confirm format.
    db.write(sql, (r'\x' + dataset_id, ))
def add(user, gist):
    """Record a gist as a new Idea.

    Builds an Idea from the gist payload and inserts it; returns the Idea
    on success, or None when the gist id is already present (the insert's
    IntegrityError is treated as "already recorded").
    """
    idea = Idea(gist['id'], user.username, gist['description'], gist['created_at'], None)
    insert_stmt = "INSERT INTO ideas VALUES (?, ?, ?, ?, ?);"
    try:
        db.write(db.con, insert_stmt, idea)
    except sqlite3.IntegrityError:
        # Duplicate gist id -- already stored.
        return None
    return idea
def fetch_all_user_activity_cfs(handle=""):
    '''
    | Fetch User's activity from Codeforces
    | It is different from *fetch_user_activity_cfs()* as it logs each submission as a seperate entry to plot the concept trail

    Only submissions newer than the latest stored created_at are inserted;
    rows are flushed in batches of ~5000 to bound the statement size.
    '''
    difficulty = 0
    payload = {}
    payload['handle'] = handle
    handle = 'cfs' + handle  # db handles are namespaced with a 'cfs' prefix
    sql = "SELECT created_at FROM activity_concept WHERE handle = \'" + handle + "\' ORDER BY created_at DESC LIMIT 1;"
    res = db.read(sql, cursor)
    if res == ():
        last_activity = 0
    else:
        last_activity = int(res[0][0])
    r = requests.get(cfs_url, params=payload)
    if (r.status_code != 200):
        print r.status_code, " returned from ", r.url
    else:
        result = r.json()['result']
        #profile reverse operation -- process oldest submissions first
        result.reverse()
        count = 1
        sql = "INSERT INTO activity_concept (handle, pid, attempt_count, status, difficulty, created_at) VALUES "
        for act in result:
            #checking for min of the 2 values as for some cases, codeforces api is returning absured results for relatice time
            relative_time = min(7200, int(act['relativeTimeSeconds']))
            submission_time = int(act['creationTimeSeconds']) + relative_time
            if submission_time > last_activity:
                status = str(act['verdict'])
                if (status == "OK"):
                    status = "1"
                else:
                    status = "0"
                sql += "(\'" + handle + "\', \'cfs" + str(
                    act['problem']['contestId']) + str(
                        act['problem']
                        ['index']) + "\', '1', " + status + ", " + str(
                            difficulty) + ", " + str(submission_time) + " ), "
                count += 1
                if (count % 5000 == 0):
                    # Flush a batch and start a fresh INSERT statement.
                    sql = sql[:-2]
                    db.write(sql, cursor, conn)
                    print count, " entries made in the database"
                    sql = "INSERT INTO activity_concept (handle, pid, attempt_count, status, difficulty, created_at) VALUES "
            else:
                break  # oldest-first: everything older is already stored
        # print sql
        # print count
        # Only flush the remainder if at least one row was appended
        # (a bare VALUES prefix ends in "S ", not ", ").
        if (sql[-2] == ","):
            sql = sql[:-2]
            db.write(sql, cursor, conn)
def fetch_all_user_activity_cfs(handle=""):
    '''
    | Fetch User's activity from Codeforces
    | It is different from *fetch_user_activity_cfs()* as it logs each submission as a seperate entry to plot the concept trail

    Duplicate of the formatted variant above; kept byte-equivalent in logic.
    '''
    difficulty = 0
    payload = {}
    payload['handle'] = handle
    handle = 'cfs' + handle  # db handles are namespaced with a 'cfs' prefix
    sql = "SELECT created_at FROM activity_concept WHERE handle = \'" + handle + "\' ORDER BY created_at DESC LIMIT 1;"
    res = db.read(sql, cursor)
    if res == ():
        last_activity = 0
    else:
        last_activity = int(res[0][0])
    r = requests.get(cfs_url, params=payload)
    if(r.status_code != 200 ):
        print r.status_code, " returned from ", r.url
    else:
        result = r.json()['result']
        #profile reverse operation -- process oldest submissions first
        result.reverse()
        count = 1
        sql = "INSERT INTO activity_concept (handle, pid, attempt_count, status, difficulty, created_at) VALUES "
        for act in result:
            #checking for min of the 2 values as for some cases, codeforces api is returning absured results for relatice time
            relative_time = min(7200, int(act['relativeTimeSeconds']))
            submission_time = int(act['creationTimeSeconds']) + relative_time
            if submission_time > last_activity:
                status = str(act['verdict'])
                if(status == "OK"):
                    status = "1"
                else:
                    status = "0"
                sql+="(\'" + handle + "\', \'cfs" + str(act['problem']['contestId']) + str(act['problem']['index']) + "\', '1', " + status + ", " + str(difficulty) + ", " + str(submission_time) +" ), "
                count+=1;
                if(count%5000 == 0):
                    # Flush a batch and start a fresh INSERT statement.
                    sql = sql[:-2]
                    db.write(sql, cursor, conn)
                    print count, " entries made in the database"
                    sql = "INSERT INTO activity_concept (handle, pid, attempt_count, status, difficulty, created_at) VALUES "
            else:
                break  # oldest-first: everything older is already stored
        # print sql
        # print count
        # Only flush the remainder if at least one row was appended.
        if(sql[-2] == ","):
            sql = sql[:-2]
            db.write(sql, cursor, conn)
def fetch_activity():
    """Refresh every user's Erdos activity, then recompute erd_score for all users."""
    # erd_users = fetch_user_list_erd()
    sql = "SELECT uid, erd_handle FROM user"
    erd_users = db.read(sql, cursor)
    for i in erd_users:
        uid = str(i[0])
        handle = str(i[1].encode('utf8'))
        fetch_user_activity_erd(uid, handle)
        # print "User activity for " + handle
    # Score: sum over solved erd problems of (correct_count-3)/attempt_count,
    # counting only problems with more than 3 correct solves.
    sql = "UPDATE user SET erd_score = \
    (SELECT SUM((correct_count-3)/attempt_count) FROM problem WHERE pid IN \
    (SELECT DISTINCT(pid) FROM activity WHERE uid = user.uid AND MID(pid,1,3)=\'erd\' AND status = 1)\
    AND correct_count>3)"
    # print sql
    db.write(sql, cursor, conn)
def calculate_difficulty(self):
    '''
    | Calculate difficulty of a problem for a user

    Resets the activity difficulty flags: failed attempts (status = 0) get
    difficulty 1, solved ones (status = 1) get difficulty 0, then rebuilds
    the difficulty matrix. (Attempt-count-based weighting was disabled
    upstream and remains off.)
    '''
    for status_value, difficulty_value in ((0, 1), (1, 0)):
        stmt = "UPDATE activity SET difficulty = %d WHERE status = %d" % (difficulty_value, status_value)
        db.write(stmt, self.cursor, self.conn)
    self.create_difficulty_matrix()
def fetch_tags_problems():
    """For each tag in the db, fetch its Codeforces problems and insert
    any (pid, tag) pairs for problems not already present in ptag."""
    sql = "SELECT DISTINCT(pid) from ptag"
    res = db.read(sql, cursor)
    problem_list = []
    for i in res:
        problem = str(i[0])
        if problem not in problem_list:
            problem_list.append(problem)
    sql = "SELECT tag from tag"
    a = db.read(sql, cursor)
    for i in a:
        url = "http://codeforces.com/api/problemset.problems"
        tag = str(i[0].encode('utf8'))
        payload = {'tags':tag}
        r = requests.get(url, params=payload)
        if(r.status_code != 200 ):
            print r.status_code, " returned from ", r.url
            continue
        else:
            res = r.json()
            problems = res['result']['problems']
            problemStatistics = res['result']['problemStatistics']  # fetched but unused here
            sql = ""
            for j in problems:
                code = str(j['contestId'])+str(j['index'])
                code = "cfs"+code  # namespace Codeforces pids with 'cfs'
                if code not in problem_list:
                    sql +="(\'"+code+"\', \'"+tag+"\'), "
            if(sql!=""):
                print sql
                sql = sql[:-2]  # drop trailing ", "
                sql = "INSERT INTO ptag (pid, tag) VALUES " + sql
                print sql
                result = db.write(sql, cursor, conn)
def increment_tags():
    '''Function to add new tags to db

    Refreshes the global ``tags`` mapping via fetch_all_tags(), diffs it
    against the tags already stored, and bulk-inserts the missing ones.

    Fix: the description escaping previously re-derived ``b`` from
    ``tags[i]`` on the second replace, discarding the double-quote escaping
    done on the first line; the replaces are now chained (matching the
    pattern used by insert_all_problems).
    '''
    fetch_all_tags()
    new_tags = []
    tag_list = []
    sql = "SELECT tag from tag"
    a = db.read(sql, cursor)
    for i in a:
        tag = str(i[0].encode('utf8'))
        tag_list.append(tag)
    for i in tags.keys():
        if tags[i] not in tag_list:
            new_tags.append(i)
    if(len(new_tags)>0):
        # NOTE(review): string-built SQL; injection-prone -- parameterize
        # if the db layer supports it.
        sql = "INSERT INTO tag (tag, description, time) VALUES "
        for i in new_tags:
            # Escape both quote kinds for the hand-built SQL below.
            a = str(i).replace('"','\\"')
            a = a.replace("'","\\'")
            b = str(tags[i]).replace('"','\\"')
            b = b.replace("'","\\'")
            sql+="('" + str(b) + "','" + str(a) + "','" + str(int(time())) + "'), "
        sql = sql[:-2]  # drop trailing ", "
        result = db.write(sql, cursor, conn)
def send_message(user_id,thread_id,message_text,message_pic):
    """Insert a message into *thread_id* on behalf of *user_id*.

    Returns (new_message, None) on success, or (None, [error, ...]) when
    the user/thread is missing, the caller doesn't own the thread, or the
    write fails. Text and picture columns are included only if provided.
    """
    user = get_user(user_id)
    if not user:
        return None, ["No such user exists",]
    thread = get_thread(thread_id)
    if not thread:
        return None, ["No such thread exists",]
    if not str(thread['user_id']) == user_id:
        return None, ["Sorry, you don't own this thread",]
    # Build the column list and placeholder list only for supplied fields.
    query = "INSERT INTO messages ("
    vals = []
    val_string = ""
    if message_text:
        query = query + "message_text,"
        vals.append(message_text)
        val_string = val_string + "'%s',"
    if message_pic:
        query = query + "message_pic,"
        vals.append(message_pic)
        val_string = val_string + "'%s',"
    query = query + "thread_id,user_id) VALUES(" + val_string + "'%s','%s')"
    vals.append(thread_id)
    vals.append(user_id)
    print query
    print vals
    # NOTE(review): %-interpolating user content into SQL is injection-prone;
    # switch to driver placeholders if db.write supports them.
    query = query % tuple(vals)
    new_message_id = db.write(query)
    if not new_message_id:
        return None, [env_constants.MYSQL_WRITING_ERROR,]
    query = "SELECT * FROM messages WHERE message_id = %s LIMIT 1" % new_message_id
    new_message = db.read_one(query)
    return new_message, None
def increment_problem():
    '''function to fetch newly added problems

    Pages through the problem source until increment_problem_from_page
    signals completion, then bulk-inserts anything collected in the
    module-global new_problem mapping -- TODO confirm new_problem is
    populated by increment_problem_from_page.
    '''
    sql = "SELECT pid FROM `problem` WHERE MID(pid, 1, 3) = \"cfs\""
    a = db.read(sql, cursor)
    problem_list = []
    for i in a:
        pid = str(i[0].encode('utf8'))
        problem_list.append(pid)
    pageno = 1
    # 0 means "keep paging"; nonzero ends the scan.
    while (increment_problem_from_page(pageno, problem_list) == 0):
        pageno += 1
    if (len(new_problem) > 0):
        sql = "INSERT INTO problem (pid, name, time) VALUES "
        for i in new_problem.keys():
            j = new_problem[i]
            # Escape quotes for the hand-built SQL (injection-prone; see review note on sibling functions).
            a = str(j[0]).replace('"', '\\"')
            a = a.replace("'", "\\'")
            b = j[1].encode('utf8')
            b = str(b).replace('"', '\\"')
            b = b.replace("'", "\\'")
            sql += "('" + str(a) + "','" + str(b) + "','" + str(int(
                time())) + "'), "
        sql = sql[:-2]  # drop trailing ", "
        result = db.write(sql, cursor, conn)
def start():
    """Solve the control problem over alpha_interval, smooth the resulting
    control/functional curves with cubic B-splines, plot and persist them."""
    result_control = []
    result_functional = []
    for alpha in alpha_interval:
        # look for an approximation: substitute the initial guess
        bvp_solution = calculate_bvp(init_approx, alpha)
        print('[p1, p2, p3] = %s \n' % bvp_solution)  # fitted p1, p2, p3
        ivp_solution = calculate_ivp(bvp_solution, alpha)
        print('lambdaT: %s \n' % ivp_solution[-1])  # solution of the Cauchy (initial value) problem
        control = calculate_control(bvp_solution, alpha)
        print('Control: %s \n' % control)  # angular-velocity control
        result_control.append(control)
        functional = calculate_functional(control, alpha)
        print('Functional: %s \n' % functional)  # value of the cost functional
        result_functional.append(functional)
    # Cubic (k=3) B-spline smoothing of both curves over the same grid.
    result_control_bspline = make_interp_spline(alpha_interval, result_control, k=3)
    result_control_new = result_control_bspline(alpha_interval)
    result_functional_bspline = make_interp_spline(alpha_interval, result_functional, k=3)
    result_functional_new = result_functional_bspline(alpha_interval)
    # Plot the quaternion evolution over time together with the smoothed curves.
    plot.draw_ivp_and_control(ivp_solution, result_control_new, time, alpha_interval)
    plot.draw_fuctional(result_functional_new, alpha_interval)
    # NOTE: only the last alpha's bvp/ivp solutions are persisted.
    results = bvp_solution.tolist(), ivp_solution[-1].tolist()
    ivp_and_control_charts = plot.open('resources/ivp_and_control.png')
    functional_chart = plot.open('resources/functional.png')
    charts = ivp_and_control_charts, functional_chart
    collection = db.connect()  # connect to the database server
    db.write(collection, results, charts)  # append a record to the collection
    cursor = db.read(collection, {})  # query everything back
    for record in cursor:
        print(record['data'])  # read every record in the collection
def rate_history(message):
    """Handle '/history CUR1/CUR2 for N days': fetch (or reuse cached)
    exchange-rate history and reply with a plot image."""
    request = message.text
    res = re.search(r'/history ([A-Z]{3})/([A-Z]{3}) for ([0-9]{1,3}) days$', request)
    if not res:
        bot.send_message(message.chat.id, "Wrong format\nEX: /history USD/CAD for 7 days")
        return
    # NOTE: 'time' here is the day-count string and shadows any time import.
    cur1, cur2, time = res.group(1), res.group(2), res.group(3)
    # Cache lookup keyed by pair+span; entries older than TIME_THRESHOLD are stale.
    data = db.get_request(cur1 + cur2 + time, datetime.datetime.now() - TIME_THRESHOLD)
    if not data:
        start_date = datetime.datetime.now().date() - datetime.timedelta(
            days=int(time))
        end_date = datetime.datetime.now().date()
        r = requests.get(
            'https://api.exchangeratesapi.io/history?start_at={}&end_at={}&base={}&symbols={}'
            .format(start_date, end_date, cur1, cur2))
        if r.status_code == 400:
            bot.send_message(message.chat.id, "No such currency")
            return
        elif not r.status_code == 200:
            bot.send_message(message.chat.id, "Excange api connection error")
            return
        data = r.text
        db.write(cur1 + cur2 + time, data, datetime.datetime.now())
    try:
        data = json.loads(data)
    except json.JSONDecodeError:
        # NOTE(review): telebot's reply_to expects the Message object, not
        # chat.id -- verify this call against the library version in use.
        bot.reply_to(message.chat.id, "Response decode error")
        return
    # Sort date->rate pairs chronologically before plotting.
    plot = history_plot(sorted([(k, v.get(cur2)) for k, v in data['rates'].items()],
                               key=lambda x: x[0]), cur1=cur1, cur2=cur2)
    if plot:
        buf = io.BytesIO()
        with buf:
            plot.savefig(buf, format='png')
            buf.seek(0)
            bot.send_photo(message.chat.id, buf)
        plot.close()
def xapian_simul():
    """Sweep frequency x CPU-count x QPS configurations, run the Xapian
    tailbench workload for each, and record the consumed energy."""
    # Logical != physical core count implies SMT/hyperthreading is on.
    if (psutil.cpu_count(logical=True) != psutil.cpu_count(logical=False)):
        print('[SYS] Hyperthreading is enabled, disabling...')
        aux.htcontrol(ht)
        print('[SYS] Hyperthreading disabled!')
    for ufreq in ufreqs:
        print('[SYS] Disabling all CPUs for new run, except the first one...')
        aux.disableall()
        freqAdjust = "sudo cpupower frequency-set -f " + ufreq
        os.system(freqAdjust)
        for icpu in cpus:
            # CPU 0 stays online; enable the others cumulatively.
            if (icpu != 0):
                aux.enable(icpu)
            for iqps in qps:
                print("[SYS] Running Xapian for " + str(icpu) + "cpus, " + str(iqps) + " qps " + " and frequency " + ufreq)
                consumedEnergy = tailbench.xapian(icpu, iqps)
                db.write(ufreq, icpu, iqps, consumedEnergy)
def get_uid(self):
    '''
    | Get uid from database using his erdos handle and also set cfs_handle if provided

    Inserts a fresh user row (and backfills activity) when the erdos handle
    is unknown; otherwise refreshes cfs_handle if it changed.

    Fix: the cfs_handle UPDATE previously had no WHERE clause and therefore
    overwrote cfs_handle for EVERY user; it is now scoped to this erd_handle.

    NOTE(review): SQL is built by string concatenation throughout --
    injection-prone; parameterize if the db layer supports it.
    '''
    sql = "SELECT uid, cfs_handle FROM user WHERE erd_handle = \'" + self.erd_handle + "\'"
    res = db.read(sql, self.cursor)
    if not res:
        # Unknown user: create the row, read back the generated uid,
        # and populate their activity history.
        sql = "INSERT INTO user (erd_handle, cfs_handle, erd_score, cfs_score) VALUES ( \'" + self.erd_handle + "\', \'" + self.cfs_handle + "\', '0', '0')"
        db.write(sql, self.cursor, self.conn)
        sql = "SELECT uid FROM user WHERE erd_handle = \'" + self.erd_handle + "\'"
        uid = db.read(sql, self.cursor)
        self.fill_activity(uid[0][0])
        return uid[0][0]
    else:
        if self.cfs_handle != res[0][1]:
            sql = "UPDATE user SET cfs_handle = \'" + self.cfs_handle + "\' WHERE erd_handle = \'" + self.erd_handle + "\'"
            db.write(sql, self.cursor, self.conn)
        return res[0][0]
def create_handler_cards(params):
    """Create a flash-card row from the request params and return it as a
    200 JSON response (including the new row id)."""
    card = {
        'answer': params['answer'],
        'question': params['question'],
        'date': params['date'],
        'listId': params['list_id']
    }
    # NOTE(review): SQL built by concatenating user-supplied values --
    # injection-prone; switch to placeholders if db.write supports them.
    card['id'] = db.write('INSERT INTO cards(answer, question, date, listId) VALUES("' + card['answer'] + '", "' + card['question'] + '", "' + card['date'] + '", "' + str(card['listId']) + '")')
    return response(200, json.dumps(card))
def fetch_user_list_cfs(): ''' | Fetch List of all the users from Codeforces ''' cfs_users = [] url = "http://codeforces.com/api/user.ratedList?activeOnly=true" r = requests.get(url) if(r.status_code != 200 ): print r.status_code, " returned from ", r.url else: result = r.json()['result'] for i in result: cfs_users.append(i['handle']) sql = "SELECT uid FROM user WHERE cfs_handle = \'" + i['handle'] + "\'" res = db.read(sql, cursor) if not res: sql = "INSERT INTO user (erd_handle, cfs_handle, erd_score, cfs_score) VALUES ( 'erd', \'cfs" + i['handle'] + "\', '0', \'" + str(i['rating']) + "\')" db.write(sql, cursor, conn) return cfs_users
def list_rates(message):
    """Reply with all USD-based exchange rates, using a short-lived cache.

    Serves from db cache when a fresh 'list' entry exists; otherwise fetches
    from the exchange API and caches the raw response text.

    Fix: the two error paths passed the Message object to send_message where
    a chat id is required -- they now use message.chat.id, matching the
    success path at the bottom.
    """
    data = db.get_request('list', datetime.datetime.now() - TIME_THRESHOLD)
    if not data:
        r = requests.get('https://api.exchangeratesapi.io/latest?base=USD')
        if not r.status_code == 200:
            bot.send_message(message.chat.id, "Excange api connection error")
            return
        data = r.text
        db.write('list', data, datetime.datetime.now())
    try:
        data = json.loads(data)
    except json.JSONDecodeError:
        bot.send_message(message.chat.id, "Response decode error")
        return
    if data.get('rates'):
        # One "- CUR: rate" line per currency, excluding the USD base itself.
        reply = ''.join('- {}: {:8.2f}\n'.format(k, v)
                        for k, v in data.get('rates').items() if k != "USD")
        bot.send_message(message.chat.id, reply)
def insert_all_tags():
    '''Function to insert all the tags in the db

    Bulk-inserts every entry of the module-level ``tags`` mapping as a
    (tag, description, time) row.

    Fix: the description escaping previously re-derived ``b`` from
    ``tags[i]`` on the second replace, discarding the double-quote escaping
    done on the first line; the replaces are now chained (matching the
    pattern used by insert_all_problems).
    '''
    # NOTE(review): string-built SQL; injection-prone -- parameterize if
    # the db layer supports it.
    sql = "INSERT INTO tag (tag, description, time) VALUES "
    for i in tags.keys():
        # Escape both quote kinds for the hand-built SQL below.
        a = str(i).replace('"','\\"')
        a = a.replace("'","\\'")
        b = str(tags[i]).replace('"','\\"')
        b = b.replace("'","\\'")
        sql+="('" + str(b) + "','" + str(a) + "','" + str(int(time())) + "'), "
    sql = sql[:-2]  # drop trailing ", "
    result = db.write(sql, cursor, conn)
def update_tag_count():
    '''Function to update count of problems for each tag in the db

    Issues one UPDATE per tag, setting tag.count to the number of ptag
    rows referencing it.
    '''
    sql = "SELECT tag FROM tag"
    a = db.read(sql, cursor)
    # sql = ""
    for i in a:
        tag = str(i[0].encode('utf8'))
        sql = "UPDATE tag SET count = (SELECT COUNT(*) FROM ptag WHERE tag = \'" + str(tag) + "\' ) WHERE tag = \'" + tag + "\'" #todo - optimise this sql
        # print sql
        result = db.write(sql, cursor, conn)
def fetch_user_activity_erd(uid="", handle=""):
    '''
    | Fetch User's activity from Erdos

    Pulls submissions newer than the latest stored one for *handle*
    ('erd'-prefixed), inserting new activity rows (and per-tag scores on
    first solve) or bumping attempt counts on repeats.

    NOTE(review): all SQL here is built by string concatenation --
    injection-prone; parameterize if the db layer supports it.
    '''
    # handle[3:] strips the 'erd' prefix to get the raw Erdos username.
    url = "http://erdos.sdslabs.co/activity/users/" + handle[3:] + ".json"
    print url
    sql = "SELECT MAX(created_at) FROM activity WHERE handle = \'" + handle + "\'"
    print sql
    res = db.read(sql, cursor)
    # print res
    if res[0][0] is None:
        last_activity = 0
    else:
        last_activity = int(res[0][0])
    # last_activity = int(last_activity)
    payload = {}
    payload['start_time'] = last_activity  # ask the API only for newer activity
    r = requests.get(url, params = payload)
    if(r.status_code != 200 ):
        print r.status_code, " returned from ", r.url
    else:
        result = r.json()['list']
        result.reverse()  # process oldest submissions first
        for act in result:
            if int(act['created_at']) > last_activity:
                sql = "SELECT pid FROM activity WHERE pid = \'erd" + act['problem_id'] + "\' AND handle = \'" + handle + "\'"
                check = db.read(sql, cursor)
                difficulty = 0
                if check == ():
                    # First time this user touches this problem: insert
                    # the activity row and credit the problem's tag scores.
                    sql = "INSERT INTO activity (handle, pid, attempt_count, status, difficulty, uid, created_at) VALUES ( \'" + handle + "\', \'erd" + act['problem_id'] + "\', '1', " + str(act['status']) + ", " + str(difficulty) + ", " + uid + ", " + str(act['created_at']) + " )"
                    db.write(sql, cursor, conn)
                    p = problem("erd" + act['problem_id'])
                    if p.exists_in_db != -1:
                        tag_data = p.tag  # mapping tag -> score contribution
                        for tag in tag_data:
                            sql = "SELECT tag FROM user_tag_score WHERE tag = \'" + tag + "\' AND handle = \'" + handle + "\'"
                            tag_check = db.read(sql, cursor)
                            if tag_check == ():
                                sql = "INSERT INTO user_tag_score (handle, tag, score) VALUES ( \'" + handle + "\' , \'" + tag + "\' , " + str(tag_data[tag]) + " )"
                                db.write(sql, cursor, conn)
                            else:
                                sql = "UPDATE user_tag_score SET score = score +" + str(tag_data[tag]) + " WHERE tag = \'" + tag + "\' AND handle = \'" + handle + "\'"
                                db.write(sql, cursor, conn)
                else:
                    # Repeat attempt: bump the counter and refresh status/time.
                    sql = "UPDATE activity SET attempt_count = attempt_count + 1, status = " + str(act['status']) + ", difficulty = " + str(difficulty) + ", created_at = " + str(act['created_at']) + " WHERE pid = \'erd" + act['problem_id'] + "\' AND handle = \'" + handle + "\'"
                    db.write(sql, cursor, conn)
                    print sql
def fetch_user_activity_cfs(handle=""):
    '''
    | Fetch User's activity from Codeforces

    Same shape as fetch_user_activity_erd: pulls submissions newer than the
    latest stored one for *handle* ('cfs'-prefixed), inserting new activity
    rows (plus per-tag scores on first solve) or bumping attempt counts.

    NOTE(review): all SQL here is built by string concatenation --
    injection-prone; parameterize if the db layer supports it.
    '''
    cfs_url = "http://codeforces.com/api/user.status"
    payload = {}
    payload['handle'] = handle
    handle = 'cfs' + handle  # db handles are namespaced with a 'cfs' prefix
    sql = "SELECT created_at FROM activity WHERE handle = \'" + handle + "\' ORDER BY created_at DESC LIMIT 1;"
    res = db.read(sql, cursor)
    if res == ():
        last_activity = 0
    else:
        last_activity = res[0][0]
        last_activity = int(last_activity)
    r = requests.get(cfs_url, params=payload)
    if(r.status_code != 200 ):
        print r.status_code, " returned from ", r.url
    else:
        result = r.json()['result']
        result.reverse()  # process oldest submissions first
        for act in result:
            if int(act['creationTimeSeconds']) > last_activity:
                sql = "SELECT pid FROM activity WHERE pid = \'cfs" + str(act['problem']['contestId']) + str(act['problem']['index']) + "\' AND handle = \'" + handle + "\'"
                check = db.read(sql, cursor)
                difficulty = 0
                if act['verdict'] == "OK":
                    status = 1
                else:
                    status = 0
                if check == ():
                    # First attempt on this problem: insert the activity row
                    # and credit the problem's tag scores.
                    sql = "INSERT INTO activity (handle, pid, attempt_count, status, difficulty, created_at) VALUES ( \'" + handle + "\', \'cfs" + str(act['problem']['contestId']) + str(act['problem']['index']) + "\', '1', " + str(status) + ", " + str(difficulty) + ", " + str(act['creationTimeSeconds']) +" )"
                    db.write(sql, cursor, conn)
                    p = problem("cfs" + str(act['problem']['contestId']) + str(act['problem']['index']))
                    if p.exists_in_db != -1:
                        tag_data = p.tag  # mapping tag -> score contribution
                        for tag in tag_data:
                            sql = "SELECT tag FROM user_tag_score WHERE tag = \'" + tag + "\' AND handle = \'" + handle + "\'"
                            tag_check = db.read(sql, cursor)
                            if tag_check == ():
                                sql = "INSERT INTO user_tag_score (handle, tag, score) VALUES ( \'" + handle + "\' , \'" + tag + "\' , " + str(tag_data[tag]) + " )"
                                db.write(sql, cursor, conn)
                            else:
                                sql = "UPDATE user_tag_score SET score = score + " + str(tag_data[tag]) + " WHERE tag = \'" + tag + "\' AND handle = \'" + handle + "\'"
                                db.write(sql, cursor, conn)
                else:
                    # Repeat attempt: bump the counter and refresh status/time.
                    sql = "UPDATE activity SET attempt_count = attempt_count + 1, status = " + str(status) + ", difficulty = " + str(difficulty) + ", created_at = " + str(act['creationTimeSeconds']) + " WHERE pid = \'cfs" + str(act['problem']['contestId']) + str(act['problem']['index']) + "\' AND handle = \'" + handle + "\'"
                    db.write(sql, cursor, conn)
def create_thread(user_id):
    """Create a new message thread owned by *user_id*.

    Returns (thread, None) on success or (None, [error, ...]) when the
    user doesn't exist or the insert fails. The thread name defaults to
    the owner's display name.
    """
    #First check if user is valid
    user = get_user(user_id)
    if not user:
        return None, ["No such user exists",]
    #query = "SELECT * FROM message_threads WHERE user_id = %s ORDER BY thread_id DESC LIMIT 1" % user_id
    # NOTE(review): %-interpolated SQL; injection-prone if display_name is
    # user-controlled -- use driver placeholders if possible.
    query = "INSERT INTO message_threads (user_id,thread_name) VALUES ('%s','%s')" % (user_id,user['display_name'])
    new_thread_id = db.write(query)
    if not new_thread_id:
        return None, [env_constants.MYSQL_WRITING_ERROR,]
    #query = "SELECT * FROM message_threads WHERE thread_id = %s LIMIT 1" % new_thread_id
    #new_thread = db.read_one(query)
    new_thread = get_thread(new_thread_id)
    return new_thread, None
def setprogram(self, program):
    """Load the (program_id, code, relative_times, repeat_program) row for
    (this module, *program*) into self.program.

    For repeating programs, marks this program as the selected one in the
    db. Sets self.looprequest and returns True. Raises IndexError if no
    matching row exists (programs[0]).
    """
    sql = """SELECT program_id, code, relative_times, repeat_program
        FROM programs
        WHERE module = {0}
        AND code = {1}""".format(db.dbstr(self.name()), db.dbstr(program))
    programs = db.read(sql, 0, 1)
    self.program = {
        'program_id':programs[0]['program_id'],
        'code':programs[0]['code'],
        'relative_times':programs[0]['relative_times'],
        'repeat_program':programs[0]['repeat_program']
    }
    if self.program['repeat_program']:
        self.debug("Setting program {0} as selected program".format(self.program['code']))
        # Clear the previous selection for this module, then select this one.
        db.write("UPDATE programs SET selected = 0 WHERE module = {0} AND selected = 1".format(db.dbstr(self.name())))
        db.write("UPDATE programs SET selected = 1 WHERE program_id = {0}".format(self.program['program_id']))
    self.looprequest = True
    # TODO: Raise exception? Or return False for invalid program?
    return True
def _log_chat_history_db(msg):
    """ Insert message into db for recording

    Skips messages without text. Direction (SENT/RECEIVED) follows
    msg.own; recipient_id and event_id are resolved from the most recent
    recipient row matching the counterparty's phone number.
    """
    if msg.text:
        msg_type = 'SENT' if msg.own else 'RECEIVED'
        # For outgoing messages the counterparty is the receiver,
        # otherwise it is the sender.
        recipient_phone = msg.receiver.phone if msg.own else msg.sender.phone
        sql = ("""
            INSERT INTO chat_history
            (`message_type`, `recipient_id`, `text`, `event_id`)
            VALUES (
                %s,
                (SELECT r.id FROM recipient r WHERE r.phone = %s ORDER BY r.id DESC LIMIT 1),
                %s,
                (SELECT r.event_id FROM recipient r WHERE r.phone = %s ORDER BY r.id DESC LIMIT 1)
            )
        """)
        db.write(sql, params=(msg_type, recipient_phone, msg.text, recipient_phone))
def insert_all_problems():
    '''Function to insert problems in db

    Bulk-inserts every entry of the module-level ``precise`` mapping as a
    (pid, name, points, correct_count, time, contestId) row -- tuple layout
    assumed to be (pid, name, points, contestId, correct_count); TODO confirm.
    '''
    # NOTE(review): string-built SQL; injection-prone -- parameterize if possible.
    sql = "INSERT INTO problem (pid, name, points, correct_count, time, contestId) VALUES "
    for j in precise:
        i = precise[j]
        # Escape both quote kinds for the hand-built SQL.
        a = str(i[0].encode('utf8')).replace('"','\\"')
        a = a.replace("'","\\'")
        b = i[1].encode('utf8')
        b = str(b).replace('"','\\"')
        b = b.replace("'","\\'")
        c = str(int(i[2]))
        d = str(i[4])
        e = str(i[3].encode('utf8'))
        sql+="('" + str(a) + "','" + str(b) + "','" + str(c) + "','" + str(d) + "','" + str(int(time())) + "', '" + str(e) + "'), "
    sql = sql[:-2]  # drop trailing ", "
    # print sql
    result = db.write(sql, cursor, conn)
def update_problem():
    """Upsert all fetched problems: insert new rows, and refresh points,
    correct_count and time for existing pids via ON DUPLICATE KEY UPDATE."""
    fetch_all_problems()  # repopulates the module-level 'precise' mapping
    # NOTE(review): string-built SQL; injection-prone -- parameterize if possible.
    sql = "INSERT INTO problem (pid, name, points, correct_count, time) VALUES "
    for j in precise.keys():
        i = precise[j]
        a = str(i[0]).replace('"','\\"')
        a = a.replace("'","\\'")
        b = i[1].encode('utf8')
        b = str(b).replace('"','\\"')
        b = b.replace("'","\\'")
        # NOTE(review): "\," is a literal backslash+comma in Python (no such
        # escape) -- commas need no SQL escaping; this inserts stray
        # backslashes into names. Verify intent.
        b = b.replace(",","\,")
        c = str(int(i[2]))
        d = str(i[3])
        sql+="('" + str(a) + "','" + str(b) + "','" + str(c) + "','" + str(d) + "','" + str(int(time())) + "'), "
    sql = sql[:-2]  # drop trailing ", "
    sql+="ON DUPLICATE KEY UPDATE points=VALUES(points),correct_count=VALUES(correct_count),time=VALUES(time);"
    print sql
    result = db.write(sql, cursor, conn)
def signup(user_name,display_name,email,password):
    """Register a new user.

    Returns (user_info, None) on success, or (None, [error, ...]) when the
    email or user name is taken or the insert fails. The password is hashed
    before storage.

    NOTE(review): the line below originally logged the password (redacted
    here as ******) -- remove any credential logging at source. SQL is also
    %-interpolated from user input: injection-prone; use placeholders.
    """
    #Check for existing user
    query = "SELECT * FROM users WHERE email = '%s' LIMIT 1" % email
    existing_user = db.read_one(query)
    if existing_user:
        return None, ["This email is already registered with us. If you forgot your password, please reset it.",]
    query = "SELECT * FROM users WHERE user_name = '%s' LIMIT 1" % user_name
    existing_user = db.read_one(query)
    if existing_user:
        return None, ["This user name is not available. Please select a new one"]
    #Okay great! Register now
    password = py_helpers.hash_password(password)
    print "password: "******"INSERT INTO users (user_name,display_name,email,password) VALUES('%s','%s','%s','%s')" % (user_name,display_name,email,password)
    new_id = db.write(query)
    if not new_id:
        return None, [env_constants.MYSQL_WRITING_ERROR,]
    query = "SELECT user_id,user_name,display_name,email,t_create,t_update FROM users WHERE user_id = %s LIMIT 1" % new_id
    user_info = db.read_one(query)
    return user_info, None
def increment_problem():
    '''function to fetch newly added problems

    Duplicate of the formatted variant earlier in the file: pages until
    increment_problem_from_page signals completion, then bulk-inserts the
    module-global new_problem mapping -- TODO confirm new_problem is
    populated by increment_problem_from_page.
    '''
    sql = "SELECT pid FROM `problem` WHERE MID(pid, 1, 3) = \"cfs\""
    a = db.read(sql, cursor)
    problem_list = []
    for i in a:
        pid = str(i[0].encode('utf8'))
        problem_list.append(pid)
    pageno = 1
    # 0 means "keep paging"; nonzero ends the scan.
    while(increment_problem_from_page(pageno, problem_list)==0):
        pageno+=1
    if(len(new_problem)>0):
        sql = "INSERT INTO problem (pid, name, time) VALUES "
        for i in new_problem.keys():
            j = new_problem[i]
            # Escape quotes for the hand-built SQL (injection-prone).
            a = str(j[0]).replace('"','\\"')
            a = a.replace("'","\\'")
            b = j[1].encode('utf8')
            b = str(b).replace('"','\\"')
            b = b.replace("'","\\'")
            sql+="('" + str(a) + "','" + str(b) + "','" + str(int(time())) + "'), "
        sql = sql[:-2]  # drop trailing ", "
        result = db.write(sql, cursor, conn)
def set_tweetid(idea, tid):
    """Store *tid* as the tweet id for the idea identified by its gist id."""
    update_stmt = "UPDATE ideas SET tweetid = (?) WHERE gistid = (?);"
    params = (tid, idea.gistid)
    db.write(db.con, update_stmt, params)
def master_configure(self):
    '''Create the tar.bz2 archive of the user software directory and run
    pre-submission checks.  Called once per master job.

    Returns (0, None) on success; raises GangaException when the local
    storage element is down, or ApplicationConfigurationError when a
    configuration check or the archive creation fails.'''
    logger.debug('SBApp master_configure called.')

    self.now = datetime.datetime.now().strftime("%Y%m%d")
    self.os_arch = os.environ['SBROOT'].split('/')[-1]
    self.user_id = utils.getOwner()
    j = self.getJobObject()

    # check the target SE status using gridmon DB (updated by nagios monitoring system)
    sql = 'SELECT se_host, nagios_test_service FROM se WHERE name_grid = %s'
    local_SE = db.gridmon(sql, (getConfig('SuperB')['submission_site'], ))
    if local_SE[0]['nagios_test_service'] == 'CRITICAL':
        raise GangaException('Local storage element %s is down.' % local_SE[0]['se_host'])

    # create the software archive
    if self.software_dir != '':
        if not os.path.isdir(self.software_dir):
            raise ApplicationConfigurationError(None, 'software_dir must be a directory.')

        # make the tar file and update sw_archive parameter
        self.software_dir = os.path.normpath(self.software_dir)
        (head, tail) = os.path.split(self.software_dir)
        self.filename = tail
        self.sw_archive = os.path.join(j.inputdir, tail + '.tar.bz2')

        logger.info('Creating archive: %s ...', self.sw_archive)
        logger.info('From: %s', head)
        logger.info('Of: %s', tail)

        retcode = subprocess.call("tar -cjf %s -C %s %s 2>/dev/null" % (self.sw_archive, head, tail), shell=True)
        # BUGFIX: subprocess.call returns a *positive* exit status when tar
        # fails (negative only when killed by a signal), so the original
        # `retcode < 0` test silently accepted failed archives.
        if retcode != 0:
            raise ApplicationConfigurationError(None, 'Error %d while creating archive.'
                                                % retcode)
    else:
        raise ApplicationConfigurationError(None, 'software_dir cannot be empty.')

    if self.executable == '':
        raise ApplicationConfigurationError(None, 'executable cannot be empty.')

    # checking that j.inputdata is a valid object
    if not isinstance(j.inputdata, (SBInputDataset.SBInputPersonalProduction,
                                    SBInputDataset.SBInputProductionAnalysis,
                                    SBInputDataset.SBInputPureAnalysis)):
        msg = 'j.inputdata %s is not allowed' % str(type(j.inputdata))
        raise ApplicationConfigurationError(None, msg)

    # checking that j.inputdata (the input dataset) is a valid dataset
    j.inputdata.check()

    # checking that j.outputdata (the output dataset) is valid
    if isinstance(j.outputdata, SBOutputDataset.SBOutputDataset):
        j.outputdata.check()

    # register a 'temp' analysis dataset (and its site relation) for this job
    self.temp_dataset = str(objectid.ObjectId())
    free_string = '%s_%s_%s' % (j.id, j.name, self.filename)
    sql = '''INSERT INTO analysis_dataset (owner, dataset_id, session, parameters, status) VALUES (%s, decode(%s, 'hex'), %s, %s, 'temp'); INSERT INTO analysis_dataset_site (dataset_id, site) VALUES (decode(%s, 'hex'), %s);'''
    params = (utils.getOwner(), self.temp_dataset, 'analysis', {'free_string': free_string}, self.temp_dataset, getConfig('SuperB')['submission_site'])
    db.write(sql, params)

    # merger: concatenate and compress the listed subjob log files
    j.merger = TextMerger()
    j.merger.files.extend(['severus.log', 'output_files.txt'])
    j.merger.ignorefailed = True
    j.merger.compress = True

    j.splitter = SBSubmission.SBSubmission()

    return (0, None)
def createDataset(self):
    '''Interactive method to guide the user in dataset creation procedure.
    If the dataset is a 'personal production' type, force user to provide a filter key.'''

    def asksParameter(parameter):
        '''Interactive method requesting user the value of each parameter
        per session (FastSim, FullSim, Analysis)

        The chosen value is also stored in new_dataset['parameters'] (the
        closed-over dict), which later becomes the hstore field of the
        bookkeeping row.
        '''
        # Three cases: free-text entry, a fixed choice list, or a choice
        # list with an extra "custom value" escape hatch.
        if parameter['customValue'] and len(parameter['values']) == 0:
            value = raw_input('\nEnter %s: ' % parameter['label'])
        elif not parameter['customValue'] and len(parameter['values']) == 0:
            raise GangaException('Invalid rule (customValue:False and values=0).')
        else:
            # Build an id/value table so the user can pick by index.
            table = list()
            i = 0
            for value in parameter['values']:
                table.append({'id': i, 'value': value})
                i += 1
            if parameter['customValue']:
                # Last row lets the user type a value not in the list.
                table.append({'id': i, 'value': 'Enter a custom value'})
            print('\nChoose %s:' % parameter['label'])
            column_names = ('id', 'value')
            print(utils.format_dict_table(table, column_names))
            index = utils.getIndex(maxExclusive=len(table))
            if parameter['customValue'] and index == len(table)-1:
                value = raw_input('Custom value: ')
            else:
                value = table[index]['value']
        # parameter insertion in dictionary. It will be subsequently
        # inserted into dataset analysis bookkeeping table, hstore field
        new_dataset['parameters'][parameter['name']] = value
        return value

    # Top-level menu: which kind of dataset to create.
    type = [
        dict(id = 0, dataset_type = 'FastSim Personal Production'),
        dict(id = 1, dataset_type = 'FullSim Personal Production'),
        dict(id = 2, dataset_type = 'Analysis'),
        ]
    column_names = ('id', 'dataset_type')
    print(utils.format_dict_table(type, column_names))
    index = utils.getIndex(maxExclusive=len(type))

    new_dataset = dict()
    new_dataset['parameters'] = dict()

    ####################
    # FAST Simulation session
    ####################
    # parameter check: mandatory, free string param management
    # TODO: parameter type check, evaluate the config file option to store parameters
    if index == 0:
        new_dataset['session'] = 'fastsim'
        parameters = [
            {"name": "evt_file", "label": "Events per file", "customValue": True, "values": []},
            {"name": "analysis", "label": "Analysis", "customValue": True, "values": ["BtoKNuNu", "BtoKstarNuNu", "DstD0ToXLL", "DstD0ToXLL", "Generics", "HadRecoilCocktail", "KplusNuNu", "SLRecoilCocktail", "tau->3mu"]},
            {"name": "dg", "label": "Geometry", "customValue": True, "values": ["DG_4", "DG_4a", "DG_BaBar"]},
            {"name": "generator", "label": "Generator", "customValue": True, "values": ["B0B0bar_Btag-HD_Cocktail", "B0B0bar_Btag-SL_e_mu_tau_Bsig-HD_SL_Cocktail", "B0B0bar_generic", "B0B0bar_K0nunu", "B0B0bar_K0nunu_SL_e_mu_tau", "B0B0bar_Kstar0nunu_Kpi", "B0B0bar_Kstar0nunu_Kpi_SL_e_mu_tau", "B+B-_Btag-HD_Cocktail", "B+B-_Btag-SL_e_mu_tau_Bsig-HD_SL_Cocktail", "B+B-_generic", "B+B-_K+nunu", "B+B-_K+nunu_SL_e_mu_tau", "B+B-_Kstar+nunu", "B+B-_Kstar+nunu_SL_e_mu_tau", "B+B-_taunu_SL_e_mu_tau", "bhabha_bhwide", "ccbar", "tau+tau-_kk2f", "uds", "udsc", "Upsilon4S_generic"]},
            {"name": "bkg_mixing", "label": "Background Mixing Type", "customValue": True, "values": ["All", "NoPair", "NoMixing"]},
            {"name": "analysis_type", "label": "Analysis Type", "customValue": True, "values": ["BtoKNuNu", "BtoKstarNuNu", "HadRecoil", "SemiLepKplusNuNu"]}
            ]
        for parameter in parameters:
            asksParameter(parameter)
    ####################
    # FULL Simulation session
    ####################
    elif index == 1:
        new_dataset['session'] = 'fullsim'
        parameters = [
            {"name": "evt_file", "label": "Events per file", "customValue": True, "values": []},
            {"name": "sim_type", "label": "Simulation Type", "customValue": False, "values": ["fullsim", "background_frame"]},
            {"name": "generator", "label": "Generator", "customValue": False, "values": ["RadBhaBha", "singleparticle"]},
            {"name": "dg", "label": "Geometry", "customValue": True, "values": ["Geometry_CIPE", "Geometry_CIPE_BGO", "Geometry_CIPE_CSI", "Geometry_CIPE_V00-00-02"]},
            {"name": "pl", "label": "Physics list", "customValue": True, "values": ["QGSP", "QGSP_BERT", "QGSP_BERT_HP"]},
            {"name": "g4ver", "label": "Geant 4 version", "customValue": True, "values": ["9.2", "9.3"]},
            {"name": "opt_photons", "label": "Optical Photons", "customValue": False, "values": ["OFF", "ON"]}
            ]
        # Generator-specific parameter sets, appended on demand below.
        radbhabha = [
            {"name": "brunobbbminde", "label": "Min. Delta E", "customValue": True, "values": []}
            ]
        singleParticle = [
            {"name": "brunopdg", "label": "PDG Code", "customValue": True, "values": []},
            {"name": "brunothetamin", "label": "Theta min.", "customValue": True, "values": []},
            {"name": "brunothetamax", "label": "Theta max.", "customValue": True, "values": []},
            {"name": "brunophimin", "label": "Phi min.", "customValue": True, "values": []},
            {"name": "brunophimax", "label": "Phi max.", "customValue": True, "values": []},
            {"name": "brunoemin", "label": "Energy (GeV) min.", "customValue": True, "values": []},
            {"name": "brunoemax", "label": "Energy (GeV) max.", "customValue": True, "values": []}
            ]
        for parameter in parameters:
            value = asksParameter(parameter)
            # parameter dependencies management: extending `parameters`
            # while iterating is intentional -- the new entries are picked
            # up by this same loop.
            if parameter['name'] == 'generator':
                if value == 'singleparticle':
                    parameters.extend(singleParticle)
                elif value == 'RadBhaBha':
                    parameters.extend(radbhabha)
    ####################
    # ANALYSIS session
    ####################
    elif index == 2:
        new_dataset['session'] = 'analysis'
    else:
        raise GangaException('Invalid selection.')

    # Mandatory free-form description, bounded to fit the DB column.
    while True:
        free_string = raw_input('\nEnter free string: ')
        max_length = 128
        if len(free_string) <= max_length:
            new_dataset['parameters']['free_string'] = free_string
            break
        else:
            print('Free string must be <= %d char long.' % max_length)

    # dataset-site relation set
    new_dataset['site'] = getConfig('SuperB')['submission_site']
    new_dataset['owner'] = utils.getOwner()
    new_dataset['dataset_id'] = str(objectid.ObjectId())

    print('\nNew dataset details:')
    self.printDatasetDetail(new_dataset)

    # Require explicit confirmation before touching the bookkeeping DB.
    value = ''
    while True:
        value = raw_input('Type \'yes\' to confirm the dataset creation or (q)uit: ')
        if value == 'yes':
            break
        elif value == 'q':
            raise utils.QuitException()

    # Insert the dataset row plus its site relation in a single statement.
    sql = '''INSERT INTO analysis_dataset (owner, dataset_id, session, parameters, status) VALUES (%s, decode(%s, 'hex'), %s, %s, 'prepared'); INSERT INTO analysis_dataset_site (dataset_id, site) VALUES (decode(%s, 'hex'), %s);'''
    params = (new_dataset['owner'], new_dataset['dataset_id'], new_dataset['session'], new_dataset['parameters'], new_dataset['dataset_id'], new_dataset['site'])
    db.write(sql, params)
def parse(filename):
    """Parse a comma-separated news dump and bulk-load it into the `news` DB.

    Each input line is expected as: _, headline, start date, end date,
    sourceId, url -- TODO confirm column layout against the dump producer.
    URLs/headlines are deduplicated via sha1; (start, end) pairs are
    deduplicated into `timeComb`.  Rows are flushed in batches of MOD lines.
    """
    count = 0
    urlId = 0
    headlineId = 0
    timeId = 0
    timeCombId = 0
    stime_curr = ""
    stime_curr_timestamp = 0
    etime_curr = ""
    etime_curr_timestamp = 0
    timeCombmap = {}
    Time = {}
    headline_sha1 = {}
    url_sha1 = {}

    # INSERT prefixes per table; rows are accumulated separately and joined
    # at flush time (avoids the original quadratic string concatenation).
    prefixes = {
        'url': "INSERT INTO url (urlId, url) VALUES ",
        'headline': "INSERT INTO headline (headlineId, headline) VALUES ",
        'time': "INSERT INTO time (timeId, time) VALUES ",
        'timeComb': "INSERT INTO timeComb (timeCombId, startDate, endDate) VALUES ",
        'mapping': "INSERT INTO mapping (urlId, headlineId, timeCombId, sourceId) VALUES ",
    }
    flush_order = ('url', 'headline', 'time', 'timeComb', 'mapping')
    pending = {'url': [], 'headline': [], 'time': [], 'timeComb': [], 'mapping': []}

    conn = db.connect('news')
    cursor = conn.cursor()

    def _flush(done):
        # Write each table's accumulated rows as one batched INSERT, then
        # reset the buffers.  Replaces the two byte-identical flush blocks
        # of the original and fixes its double-write when the line count
        # was an exact multiple of MOD (buffers are now always cleared).
        for table in flush_order:
            rows = pending[table]
            if rows:
                db.write(prefixes[table] + ", ".join(rows), cursor, conn)
                pending[table] = []
        print("%d lines inserted into the DB" % done)

    with open(filename, "r") as parser:
        for raw in parser:
            line = raw.split(",")
            temp_headline = str(line[1].replace("\"", "\\\"").replace("\'", "\\\'").strip())
            temp_url = str(line[5].replace("\"", "\\\"").replace("\'", "\\\'").strip())
            temp_url_sha1 = hashlib.sha1(temp_url).hexdigest()
            temp_headline_sha1 = hashlib.sha1(temp_headline).hexdigest()

            # Dict membership directly (the original's `.keys()` built a
            # list per test under Python 2 -- quadratic overall).
            if temp_url_sha1 not in url_sha1:
                urlId += 1
                url_sha1[temp_url_sha1] = str(urlId)
                pending['url'].append("(\"" + str(urlId) + "\" , \"" + temp_url + "\")")
            if temp_headline_sha1 not in headline_sha1:
                headlineId += 1
                headline_sha1[temp_headline_sha1] = str(headlineId)
                pending['headline'].append("(\"" + str(headlineId) + "\" , \"" + temp_headline + "\")")

            # Start time: only re-parse when the raw text changed (the dump
            # is assumed to repeat dates on consecutive lines -- cache the
            # last conversion).
            stime = str(line[2]).strip()
            if stime != stime_curr:
                stime_curr = stime
                stime = int(date_to_timestamp(stime))
                stime_curr_timestamp = stime
                if stime not in Time:
                    timeId += 1
                    Time[stime] = str(timeId)
                    pending['time'].append("(\"" + str(timeId) + "\" , \"" + str(stime) + "\")")
            else:
                stime = stime_curr_timestamp

            # End time, same caching scheme.
            etime = str(line[3]).strip()
            if etime != etime_curr:
                etime_curr = etime
                etime = int(date_to_timestamp(etime))
                etime_curr_timestamp = etime
                if etime not in Time:
                    timeId += 1
                    Time[etime] = str(timeId)
                    pending['time'].append("(\"" + str(timeId) + "\" , \"" + str(etime) + "\")")
            else:
                etime = etime_curr_timestamp

            # Deduplicate (start, end) combinations.
            key = str(stime) + str(etime)
            if key not in timeCombmap:
                timeCombId += 1
                timeCombmap[key] = str(timeCombId)
                pending['timeComb'].append("(\"" + str(timeCombId) + "\", \"" + Time[stime] + "\", \"" + Time[etime] + "\")")

            temp_sourceId = str(line[4]).strip()
            # BUGFIX: the original emitted `\" ` before sourceId, storing a
            # spurious leading space in every mapping row.
            pending['mapping'].append("(\"" + url_sha1[temp_url_sha1] + "\" , \"" + headline_sha1[temp_headline_sha1] + "\", \"" + timeCombmap[key] + "\", \"" + temp_sourceId + "\")")

            count += 1
            if count % MOD == 0:
                _flush(count)

    # Final partial batch (no-op if everything was already flushed).
    _flush(count)
    cursor.close()
number_to_recommend = 5 conn = db.connect() cursor = conn.cursor() remote_conn = db.connect('remote') remote_cursor = remote_conn.cursor() sql = "SELECT (correct_count)/(attempt_count) as difficulty FROM problem \ WHERE MID(pid,1,3) = \"erd\" AND attempt_count>5" erd_problem_difficulty = db.read(sql, cursor) sql = "SELECT uid, erd_score/(SELECT MAX(erd_score) FROM user), cfs_score/(SELECT MAX(cfs_score) FROM user) FROM user " user_result = db.read(sql, cursor) sql = "UPDATE problem_reco SET is_deleted = 1" db.write(sql, cursor, conn) sql = "CREATE table IF NOT EXISTS problem_reco_new LIKE problem_reco" db.write(sql, remote_cursor, remote_conn) sql = "SELECT pid FROM problem WHERE MID(pid, 1, 3)='erd' " problem_result = db.read(sql, cursor) count = 0 sql = "INSERT INTO problem_reco_new (uid, base_pid, status, reco_pid, score, time_created, time_updated, state, is_deleted) VALUES " for i in problem_result: pid = str(i[0]) a = problem(pid = pid, erd_problem_difficulty = erd_problem_difficulty, conn = conn, cfs_max_score = cfs_max_score, lower_threshold = lower_threshold, upper_threshold = upper_threshold, number_to_recommend = number_to_recommend, batchmode = 1) for j in user_result: