def send_issue_mail(id, new=True):
    """Email the reporter and owner of issue *id*.

    Args:
        id: issue primary key.
        new: True for a "New issue" subject line, False for "Updated issue".

    Best-effort: any failure is printed and swallowed so mail problems
    never break the calling request.
    """
    try:
        issue = get_issue(get_db(), id)
        print(issue)
        if not issue:
            flash("Failed to mail about this issue since we failed to obtain issue by id %s" % str(id), 'error')
            # BUG FIX: previously fell through and dereferenced a None issue,
            # which raised inside the broad except and silently skipped the flash's intent.
            return
        reporter_email = get_email(get_db(), issue['reporter'])
        owner_email = get_email(get_db(), issue['owner'])
        # De-duplicate when reporter and owner are the same account.
        if reporter_email == owner_email:
            recipients = [reporter_email]
        else:
            recipients = [reporter_email, owner_email]
        if new:
            subject = 'New issue: %s' % issue['short_text']
        else:
            subject = 'Updated issue: %s' % issue['short_text']
        link = url_for("issue", id=id, _external=True)
        html = """ See <a href="%s">%s</a> """ % (link, link)
        message = Message(subject=subject, recipients=recipients, html=html)
        mail.send(message)
    except Exception as e:
        # Deliberate best-effort swallow; log to stdout for diagnosis.
        print('send_issue_mail caught', e)
def insert_order(id_str, dup_count, subject, reg_id, reg_date, chapter, bill, precis, pub_date, url, links, acts, departments):
    """Idempotently persist an order with its attachments, acts and departments.

    Every INSERT is guarded by ``SELECT ... WHERE NOT EXISTS`` keyed on the
    (id, order_count) pair (plus act/department text), so re-scraping the
    same order is a no-op.

    Args:
        id_str: order identifier.
        dup_count: disambiguator for orders sharing an id.
        subject..url: scalar order columns.
        links: iterable of attachment links, handed to scrape_attachment.
        acts: iterable of elements whose .text names an act.
        departments: iterable of elements whose .text names a department.
    """
    with get_db() as cur:
        cur.execute(
            """INSERT INTO orders(id, order_count, subject, reg_id, reg_date, chapter, bill, precis, pub_date, url)
               SELECT %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
               where not exists (select 1 from orders where id = %s and order_count = %s);""",
            (id_str, dup_count, subject, reg_id, reg_date, chapter, bill,
             precis, pub_date, url, id_str, dup_count))
        cur.connection.commit()
    for link in links:
        scrape_attachment(id_str, dup_count, link)
    for act in acts:
        act_str = act.text.strip()
        # Collapse internal runs of whitespace to a single space.
        # FIX: raw string avoids the invalid escape-sequence warning
        # the old '\s\s+' literal produced on Python 3.
        act_str = re.sub(r'\s\s+', u' ', act_str)
        with get_db() as cur:
            cur.execute(
                """INSERT INTO acts(order_id, order_count, act)
                   SELECT %s, %s, %s
                   where not exists (select 1 from acts where order_id =%s and order_count = %s and act = %s);""",
                (id_str, dup_count, act_str, id_str, dup_count, act_str))
            cur.connection.commit()
    for dept in departments:
        department = dept.text.strip()
        with get_db() as cur:
            cur.execute(
                """INSERT INTO departments(order_id, order_count, department)
                   SELECT %s, %s, %s
                   where not exists (select 1 from departments where order_id =%s and order_count = %s and department = %s);""",
                (id_str, dup_count, department, id_str, dup_count, department))
            cur.connection.commit()
def process_case(row=None, db=None, refresh=True, tree=None):
    """Process a case document's links and persist the processed HTML.

    Args:
        row: mapping with at least 'id' and 'document' keys.
        db: optional open connection; a default one is used when omitted.
        refresh: unused here, kept for interface compatibility.
        tree: pre-parsed document tree; parsed from row['document'] if absent.

    Returns:
        The processed lxml tree.

    BUG FIX: the old code evaluated ``db or get_db()`` twice — once for the
    cursor and once for commit — so when db was None the UPDATE could be
    executed on one connection and committed on another, losing the write.
    Resolve the connection exactly once.
    """
    conn = db or get_db()
    if not tree:
        tree = html.fromstring(row.get('document'))
    tree = process_case_links(tree, db)
    with conn.cursor() as cur:
        query = """UPDATE documents d
                   SET processed_document = %(doc)s
                   WHERE d.id = %(id)s """
        cur.execute(query, {
            'id': row.get('id'),
            'doc': etree.tostring(tree, encoding='UTF-8', method="html"),
        })
    conn.commit()
    return tree
def process_instrument(document_ids):
    # Batch-process instruments that still lack a processed_document:
    # fetch each row (with a flag saying whether it is the latest version)
    # and run queries.process_instrument with a link strategy bound to a
    # shared title cache. Returns 'done' when the batch completes.
    # NOTE(review): indentation reconstructed from collapsed source — confirm
    # block boundaries (especially the commit placement) against the original.
    with server.app.test_request_context():
        db = get_db()
        # Shared cache of link titles, reused across every document in the batch.
        title_store = links.get_links(db)
        def get_links(tree, db):
            # Strategy hook: resolve links using the shared title cache.
            return links.process_instrument_links(tree, db, links=title_store)
        with db.cursor(cursor_factory=extras.RealDictCursor) as cur:
            for document_id in document_ids:
                # 'latest' flags whether this id appears in latest_instruments.
                query = """SELECT *, exists(select 1 from latest_instruments where id=i.id) as latest FROM instruments i JOIN documents d on d.id = i.id where processed_document is null and i.id = %(id)s """
                cur.execute(query, {'id': document_id})
                result = cur.fetchall()
                if len(result):
                    queries.process_instrument( row=result[0], db=db, refresh=False, latest=result[0].get('latest'), strategy={'links': get_links})
                    # Commit after each successfully processed document.
                    db.commit()
        db.close()
        return 'done'
def index():
    """Return every non-deleted node as a JSON list."""
    rows = get_db().execute(
        'SELECT id, ip, role_id, created, updated, status '
        'FROM node where deleted = 0'
    ).fetchall()
    payload = [dict(row) for row in rows]
    return jsonify({'status': 1, 'nodes': payload})
def get_node_info(id):
    """Return connection details plus role name for node *id* as a dict."""
    row = get_db().execute(
        'SELECT n.ip, n.username, n.password, r.name AS role_name '
        'FROM node n JOIN role r ON n.role_id = r.id WHERE n.id = ?',
        (id,)
    ).fetchone()
    return dict(row)
def create():
    """Create a node record and register its host.

    Returns JSON ``{'status': 1, 'role_id': <new node row id>}`` on success
    (the "role_id" key actually carries the inserted node's row id — kept
    for backward compatibility with existing clients), or
    ``{'status': -1, 'error': ...}`` on failure.
    """
    if request.method == 'POST':
        ip = request.json['ip']
        username = request.json['username']
        password = request.json['password']
        role_id = request.json['role_id']
        role_name = request.json['role_name']
        logger.debug(request.json)
        try:
            db = get_db()
            cursor = db.cursor()
            cursor.execute(
                "INSERT INTO node (ip, username, password, role_id, status, deleted) VALUES (?, ?, ?, ?, 'created', 0)",
                (ip, username, password, role_id)
            )
            # Renamed local: lastrowid is the new node id, not a role id.
            node_id = cursor.lastrowid
            db.commit()
            cursor.close()
            add_host(role_name, ip)
            return jsonify({'status': 1, "role_id": node_id})
        except Exception as ex:
            logger.error(ex)
            logger.error(traceback.format_exc())
            # FIX: Exception.message does not exist on Python 3; use str(ex).
            return jsonify({'status': -1, 'error': str(ex)})
def index():
    """Show all the roles"""
    rows = get_db().execute('SELECT id, name FROM role').fetchall()
    return jsonify({'status': 1, 'roles': [dict(r) for r in rows]})
def update_user(user, userid):
    """Update role/bio/neighborhood for *userid* from the posted form.

    Only the logged-in user may edit their own record; otherwise 403.
    Database errors are printed and the transaction rolled back.
    """
    role = request.form.get("role", "")
    description = request.form.get("description", "")
    neighborhood = request.form.get("neighborhood", "")
    if userid != user["userid"]:
        return "403", 403
    conn = None
    try:
        # Open a cursor to perform database operations
        conn = db.get_db()
        cur = conn.cursor()
        sql_query = "UPDATE users SET role = %s, bio = %s, neighborhood = %s WHERE userid = %s;"
        sql_data = (role, description, neighborhood, userid)
        cur.execute(sql_query, sql_data)
        conn.commit()
        cur.close()
    except db.psycopg2.DatabaseError as e:
        # FIX: modernized Python-2-only "except X, e" syntax and pre-bound
        # conn so this handler cannot hit an unbound local when get_db() fails.
        print(e)
        if conn:
            conn.rollback()
def vote():
    """Apply a like (+1) or dislike (-1) vote to a post.

    Returns the refreshed post payload on success, or one of the shared
    error responses on malformed input / database failure.
    """
    db = get_db()
    cursor = db.cursor()
    try:
        data = request.get_json()
    except ValueError:
        cursor.close()
        return response_2
    try:
        post_id = data['post']
        vote = data['vote']
    except KeyError:
        cursor.close()
        return response_2
    if vote == 1:
        query = get_like
    elif vote == -1:
        query = get_dislike
    else:
        # Only +1/-1 are valid vote values.
        cursor.close()
        return response_2
    try:
        cursor.execute(query, [post_id])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    response = make_response_for_post(post_id, cursor, [])
    # FIX: the cursor was leaked on the success path.
    cursor.close()
    return make_response(0, response)
def listUsers():
    """List users who posted on a forum, honoring since/order/limit params."""
    short_name = request.args.get('forum')
    # FIX: identity comparison with None instead of "== None".
    if short_name is None:
        return response_2
    try:
        db = get_db()
    except DatabaseError:
        return response_4
    cursor = db.cursor()
    id = Get_forum_id(short_name, cursor)
    if id == -1:
        cursor.close()
        return response_1
    since = Optional_sience_id(request)
    add_order = optional_Order(request)
    add_limit = optional_Limit(request)
    try:
        cursor.execute(user_id_with_posts_on_this_forum + add_order + add_limit, [id, since])
        list_id = cursor.fetchall()
    except DatabaseError:
        cursor.close()
        return response_4
    # Each row's first column is a user id; expand to full user payloads.
    list_user = [make_response_for_user(row[0], cursor) for row in list_id]
    cursor.close()
    return make_response(0, list_user)
def exchange_rates():
    """Return a week of USD exchange-rate series per requested currency.

    Response shape::

        {"series": [{"name": "USDBRL", "data": [...]}, ...],
         "categories": ["2018-05-18", ...]}
    """
    args = parser.parse_args()
    dates = get_previous_dates(7)
    series = []
    wanted = [c for c in AVAILABLE_FROM_TO_CURRENCIES if c in args['currency']]
    for currency in wanted:
        entry = {"name": currency, "data": []}
        for day in dates:
            raw = get_db().get(compose_redis_key("USD", currency, day))
            if raw is None:
                raise Exception('Database not populated yet!')
            entry["data"].append(json.loads(raw))
        series.append(entry)
    return jsonify({"series": series, "categories": dates})
def update():
    """Update a post's message and return the refreshed post payload."""
    if request.method == 'GET':
        return response_2
    db = get_db()
    cursor = db.cursor()
    try:
        payload = ujson.loads(request.data)
    except ValueError:
        cursor.close()
        return response_2
    try:
        post_id = payload['post']
        message = payload['message']
    except KeyError:
        cursor.close()
        return response_2
    try:
        cursor.execute(update_post, [message, post_id])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    body = make_response_for_post(post_id, cursor, [])
    cursor.close()
    return make_response(0, body)
def create():
    """Create a forum and return its representation keyed by short name."""
    if request.method == 'GET':
        return make_response(2, "no valid maethod")
    try:
        db = get_db()
    except DatabaseError:
        return response_4
    cursor = db.cursor()
    try:
        payload = request.get_json()
    except ValueError:
        cursor.close()
        return response_2
    try:
        name = payload['name']
        short_name = payload['short_name']
        user = payload['user']
    except KeyError:
        cursor.close()
        return response_2
    user_id = Get_user_id(user, cursor)
    if user_id == -1:
        cursor.close()
        return response_1
    try:
        cursor.execute(create_forum, [name, short_name, user_id])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    body = make_response_for_forum_by_short_name(short_name, cursor, [])
    cursor.close()
    return make_response(0, body)
def unsubscribe():
    """Remove a user's subscription to a thread and echo the pair back."""
    try:
        payload = request.get_json()
    except ValueError:
        return response_2
    try:
        thread = payload['thread']
        user = payload['user']
    except KeyError:
        return response_2
    db = get_db()
    cursor = db.cursor()
    sub_id = Get_subscribe_id(user, thread, cursor)
    if sub_id == -1:
        cursor.close()
        return response_1
    try:
        cursor.execute(get_unsubscribe, [sub_id])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    cursor.close()
    return make_response(0, {"thread": thread, "user": user})
def subscribe():
    """Subscribe *user* to *thread*; echoes the pair back on success."""
    if request.method == 'GET':
        return response_2
    try:
        data = request.get_json()
    except ValueError:
        return response_2
    try:
        thread = data['thread']
        user = data['user']
    except KeyError:
        return response_2
    db = get_db()
    cursor = db.cursor()
    try:
        cursor.execute(get_subscribe, [user, thread])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    response = {"thread": thread, "user": user}
    # FIX: the close was commented out, leaking the cursor on every success.
    cursor.close()
    return make_response(0, response)
def listPosts():
    """List a forum's posts, honoring since/order/limit and related params."""
    short_name = request.args.get('forum')
    if short_name is None:
        return response_2
    try:
        db = get_db()
    except DatabaseError:
        return response_4
    cursor = db.cursor()
    related_list = request.args.getlist('related')
    forum_id = Get_forum_id(short_name, cursor)
    if forum_id == -1:
        cursor.close()
        return make_response(1, "Forum not found")
    since = Optional_sience_date(request)
    add_order = optional_Order(request)
    add_limit = optional_Limit(request)
    try:
        cursor.execute(list_id_post + add_order + add_limit, [forum_id, since])
        list_id = cursor.fetchall()
    except DatabaseError:
        cursor.close()
        return response_4
    posts = [make_response_for_post(row, cursor, related_list) for row in list_id]
    cursor.close()
    return make_response(0, posts)
def get(userid=None):
    """Fetch all messages addressed to *userid*, newest first.

    Returns a list of row dicts, or an empty list on any failure.

    FIX: the original built the payload but never returned it, so every
    caller received None; also modernized the Python-2-only except syntax.
    """
    payload = []
    try:
        conn = db.get_db()
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        sql_query = """SELECT * FROM message WHERE subject = %s ORDER BY created DESC; """
        sql_data = (userid,)
        cur.execute(sql_query, sql_data)
        payload = cur.fetchall()
        cur.close()
    except Exception as e:
        print(e)
        payload = []
    return payload
def send(subject=None, author=None, message=None):
    """Insert a message row.

    Returns True on success, False when any argument is missing or the
    insert fails (the transaction is rolled back on failure).
    """
    if subject is None or author is None or message is None:
        return False
    conn = None
    try:
        # Open a cursor to perform database operations
        conn = db.get_db()
        cur = conn.cursor()
        sql_query = "INSERT INTO message (subject, author, message) VALUES (%s, %s, %s);"
        sql_data = (subject, author, message)
        cur.execute(sql_query, sql_data)
        conn.commit()
        cur.close()
        # FIX: success previously fell through and returned None.
        return True
    except db.psycopg2.DatabaseError as e:
        # FIX: modernized Python-2-only "except X, e" syntax.
        print(e)
        if conn:
            conn.rollback()
        return False
def db(self):
    """Return a db instance if databases option is defined"""
    # FIX: the condition lock was acquired but never released, so the
    # second caller deadlocked forever. The context manager guarantees
    # release on every path, including exceptions from get_db().
    with self._condition:
        if not hasattr(self, '_db') or self._db is None:
            self._db = get_db()
        return self._db
def close():
    """Close the thread named in the request; returns {"thread": id}."""
    if request.method == 'GET':
        return response_2
    try:
        db = get_db()
    except DatabaseError:
        return response_4
    cursor = db.cursor()
    try:
        data = request.get_json()
    except ValueError:
        cursor.close()
        return response_2
    try:
        id = data['thread']
    except KeyError:
        cursor.close()
        return response_2
    if cursor.execute(get_thread_id, [id]) == 0:
        # FIX: the cursor was leaked on this not-found path.
        cursor.close()
        return response_1
    try:
        cursor.execute(close_thread, [id])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    cursor.close()
    return make_response(0, {"thread": id})
def get(self):
    """Return up to four random public 'tpr' dares with formatted names."""
    db = get_db()
    docs = list(
        db.dares.find(
            {'method': 'tpr', 'public': True},
            sort=(('_id', -1),),
            limit=100
        )
    )
    dares = []
    if docs:
        # FIX: xrange is Python-2-only; range is equivalent here.
        for _ in range(4):
            dares.append(random.choice(docs))
        # De-duplicate picks by value.
        dares = [dict(y) for y in set(tuple(x.items()) for x in dares)]
    processed = []
    for dare in dares:
        name_list = dare['name'].title().split()
        # FIX: guard against an empty name leaving namestr unbound.
        namestr = ''
        # FIX: "is" compared int identity (implementation-dependent); use ==.
        if len(name_list) == 1:
            namestr = "%s." % name_list[0]
        elif len(name_list) >= 2:
            # First name plus last initial.
            namestr = "%s %s." % (name_list[0], name_list[-1][:1])
        processed.append({
            'name': namestr,
            'dare': 'I dare to %s.' % dare['dare']
        })
    return {'dares': processed}
def fix_cycles(db=None):
    """ remove cycles from relationship heirarchy, just to be safe """
    # Uses the caller's connection when given, else the default one.
    db = db or get_db()
    # find and remove cycles
    with db.cursor(cursor_factory=extras.RealDictCursor) as cur:
        # Recursive walk of the subordinates graph; the 'cycle' column flags a
        # child id re-appearing on its own ancestor path. The outer SELECT
        # returns a single offending edge (ordered by year) per execution.
        query = """ WITH RECURSIVE search_graph(child_id, parent_id, depth, path, cycle) AS ( SELECT g.child_id, n.id, 1, ARRAY[g.child_id], false FROM subordinates g JOIN newest n on g.parent_id = n.govt_id UNION ALL SELECT g.child_id, n.id, sg.depth + 1, path || g.child_id, g.child_id = ANY(path) FROM subordinates g JOIN newest n on g.parent_id = n.govt_id, search_graph sg WHERE g.child_id = sg.parent_id AND NOT cycle ) SELECT distinct child_id, parent_id, year FROM search_graph g join instruments on id = child_id where cycle = true order by year limit 1; """
        rm_query = """delete from subordinates where child_id = %(child_id)s and parent_id = %(parent_id)s"""
        # Delete one cycle-forming edge at a time until none remain.
        while True:
            cur.execute(query)
            results = cur.fetchall()
            if len(results):
                # NOTE(review): the dict is passed as a positional logging arg
                # with no format placeholder in the message — confirm the
                # intended logger call signature.
                current_app.logger.info('removing cycle', dict(results[0]))
                cur.execute(rm_query, results[0])
            else:
                break
    db.commit()
def login():
    """Log in a registered user by storing the user id in the session."""
    if request.method == 'POST':
        username = request.json['username']
        password = request.json['password']
        db = get_db()
        user = db.execute(
            'SELECT * FROM user WHERE username = ?', (username,)
        ).fetchone()
        if user is None:
            error = 'Incorrect username.'
        elif not check_password_hash(user['password'], password):
            error = 'Incorrect password.'
        else:
            error = None
        if error is None:
            # store the user id in a new session and return to the index
            session.clear()
            session['user_id'] = user['id']
            return redirect(url_for('index'))
        flash(error)
    return render_template('auth/login.html')
def newtask():
    """Create a task from the submitted form and redirect to the index."""
    if request.method == 'POST':
        db = get_db()
        # TODO: Matching owner ID
        owner_id = 1
        # get name and difficulty from the form
        name = request.form['name']
        diffstring = request.form['difficulty']
        # convert difficulty string to an integer
        # FIX: "is 'Easy'" tested string *identity*, which is unreliable for
        # runtime strings — 'Easy' submissions could fall through to 3.
        if diffstring == 'Easy':
            difficulty = 1
        elif diffstring == 'Medium':
            difficulty = 2
        else:
            difficulty = 3
        # get description from the form
        description = request.form['description']
        # add the task to the database
        db.execute(
            'INSERT INTO task (name, difficulty, description, owner_id)'
            ' VALUES (?, ?, ?, ?)',
            (name, difficulty, description, owner_id)
        )
        db.commit()
        return redirect(url_for('index'))
    return render_template('newtask.html')
def find_amendments(tree, document_id, govt_id_lookup=None, links=None, db=None):
    # Collect mogrified VALUES tuples describing amendment history notes in
    # *tree*, resolving each amending instrument first via the explicit
    # amending-provision href, then by fuzzy-matching the amending-leg text.
    # NOTE(review): indentation reconstructed from collapsed source.
    links = links or get_links(db)
    govt_id_lookup = govt_id_lookup or get_all_govt_ids(db)
    db = db or get_db()
    with db.cursor(cursor_factory=extras.RealDictCursor) as cur:
        # there are typos in the acts, screw em
        date_pat = re.compile('^\d\d? (%s) (\d){4}$' % '|'.join([c for c in calendar.month_name if c]))
        results = []
        for history in tree.findall('.//history-note'):
            data = {'note_id': None, 'target_id': document_id, 'source_id': None, 'amendment_date': None, 'unknown_source_text': None}
            data['note_id'] = history.attrib.get('id')
            try:
                # Preferred path: explicit cross-reference href.
                data['source_id'] = govt_id_lookup[history.findall('.//amending-provision')[0].attrib['href']]
            except (IndexError, KeyError):
                try:
                    # Fallback: resolve from the amending-leg element's text.
                    text = etree.tostring(history.find('amending-leg'), method="text", encoding="UTF-8")
                    data['source_id'] = links.get_active(text)['id']
                except (MatchError, TypeError):
                    if history.find('amending-leg') is not None:
                        # Present but unresolvable: keep the raw text.
                        data['unknown_source_text'] = text
                    else:
                        # No amending information at all; skip the note.
                        continue
            if history.find('amendment-date') is not None:
                text = history.find('amendment-date').text
                # Only keep dates shaped like "1 January 2000".
                if text and date_pat.match(text):
                    data['amendment_date'] = text
            results.append(cur.mogrify("""(%(note_id)s, %(target_id)s, %(source_id)s, to_date(%(amendment_date)s, 'dd Month YYYY'), %(unknown_source_text)s) """, data))
        return results
def register():
    """Register a new user.

    Validates that the username is not already taken. Hashes the
    password for security.
    """
    if request.method == 'POST':
        username = request.json['username']
        password = request.json['password']
        db = get_db()
        error = None
        if not username:
            error = 'Username is required'
        elif not password:
            error = 'Password is required'
        else:
            existing = db.execute(
                'SELECT id FROM user WHERE username = ?', (username,)
            ).fetchone()
            if existing is not None:
                error = 'User {0} is already registered.'.format(username)
        if error is None:
            db.execute(
                'INSERT INTO user (username, password) VALUES (?, ?)',
                (username, generate_password_hash(password))
            )
            db.commit()
            return redirect(url_for('auth.login'))
        flash(error)
    return render_template('auth/register.html')
def list_playbook(id):
    """Return every playbook execution belonging to task execution *id*."""
    rows = get_db().execute(
        'SELECT * FROM playbook_execution WHERE task_execution_id = ?',
        (id,)
    ).fetchall()
    return jsonify({'status': 1, 'playbook_executions': [dict(r) for r in rows]})
def get(self):
    """Query events filtered by time window, criticality, category and
    description substring; newest first.
    """
    query = query_parser.parse_args()
    # time window
    filters = ['unix_timestamp >= ?']
    if query['until'] != -1:
        filters.append('unix_timestamp <= ?')
        sql_parameters = [query['until'] - query['hours_ago'] * 3600, query['until']]
    else:
        sql_parameters = [time.time() - query['hours_ago'] * 3600]
    # criticality
    if query['criticality'] is not None:
        # FIX: map() is lazy on Python 3 — materialize so len() works and
        # the parameters can be concatenated.
        criticality = list(map(int, query['criticality'].split(',')))
        filters.append('criticality in (%s)' % ','.join(['?'] * len(criticality)))
        sql_parameters += criticality
    # category
    if query['category'] is not None:
        category = query['category'].split(',')
        filters.append('category in (%s)' % ','.join(['?'] * len(category)))
        sql_parameters += category
    # description (substring match)
    if query['description'] is not None:
        filters.append('description like (?)')
        sql_parameters.append("%%%s%%" % query['description'])
    sql = 'select * from events'
    if len(filters) > 0:
        sql += ' where ' + ' and '.join(filters)
    sql += ' order by unix_timestamp desc'
    return get_db().execute(sql, sql_parameters).fetchall()
def update():
    """Update a thread's message and slug; returns the refreshed thread."""
    if request.method == 'GET':
        return response_2
    try:
        db = get_db()
    except DatabaseError:
        return response_4
    cursor = db.cursor()
    try:
        data = request.get_json()
    except ValueError:
        cursor.close()
        return response_2
    try:
        thread = data['thread']
        message = data['message']
        slug = data['slug']
    except KeyError:
        cursor.close()
        return response_2
    if cursor.execute(get_thread_id, [thread]) == 0:
        # FIX: the cursor was leaked on this not-found path.
        cursor.close()
        return response_1
    try:
        cursor.execute(update_thread, [message, slug, thread])
        db.commit()
    except DatabaseError:
        cursor.close()
        return response_4
    response = make_response_for_thread(thread, cursor)
    cursor.close()
    return make_response(0, response)
pos += 1 return PairPoint if __name__ == '__main__': start = timeit.default_timer() #max H = 5.72 , and has nan name = 'OG-CD2-3.wav' #name = speech_en.specsub(name) fs, sig = wavfile.read(name) sig = mono_detection(sig) frameSize = 3 * 441 Overlap = 0 Hop = frameSize - Overlap PairPoint = VAD(sig, fs, frameSize) feaDB = get_db() ''' for write wav use if sig.dtype != 'int16': sig=np.asarray(sig,dtype=np.int16) ''' for i in PairPoint: #for PairPoint index 0 is start point , 1 is end point StartTime = i[0] * 441 EndTime = i[1] * 441 Duration = (EndTime - StartTime) / fs sigTemp = sig[StartTime:EndTime] fea = MFCC(sigTemp, fs) #print(str(fea).replace("\n","")) feaDB.FeaSpace.insert_one({ "MFCC": str(fea).replace("\n", ""),
def get_player_name(player_id):
    """Look up a player's display name by id.

    FIX: the id was interpolated into the SQL with str.format, allowing
    SQL injection and breaking on non-numeric ids; use a parameterized
    query instead (sqlite-style '?' placeholder, matching the sibling
    queries in this module — confirm driver if it ever changes).
    """
    conn = db.get_db()
    curs = conn.cursor()
    res = curs.execute(
        "SELECT * FROM players WHERE id=?", (player_id,)).fetchone()
    return db.players_row_to_dict(res)["name"]
def reset():
    """Reset the database, then send the client back to the index page."""
    get_db().reset()
    return redirect(url_for('index'))
def single_post(post_id):
    """Update (PUT) or delete (DELETE) a single post; returns JSON status.

    PUT accepts form data (title/body) and an optional file upload whose
    saved URL is stored alongside the post.
    """
    response_object = {'status': 'success'}
    if request.method == 'PUT':
        post_data = request.form
        # FIX: get_db() was called twice; one connection suffices.
        db = get_db()
        if 'file' in request.files and request.files['file'] != '':
            # NOTE(review): comparing a FileStorage to '' is always True;
            # this probably meant to test file.filename — confirm before
            # changing the condition.
            remove_image(post_id)
            # make file save dir (use mongodb _id)
            file_dir_path = os.path.join(FILE_ROOT_PATH, str(post_id))
            os.makedirs(file_dir_path)
            # file save
            file = request.files['file']
            filename = secure_filename(file.filename)
            file.save(os.path.join(file_dir_path, filename))
            # persist the file URL with the post document
            file_url = "http://localhost:5000/images/" + str(
                post_id) + "/" + str(filename)
            result = db.post.update_one({"_id": ObjectId(post_id)}, {
                '$set': {
                    'title': post_data["title"],
                    'body': post_data["body"],
                    'updated': datetime.utcnow(),
                    "file_url": file_url
                }
            })
        else:
            result = db.post.update_one({"_id": ObjectId(post_id)}, {
                '$set': {
                    'title': post_data["title"],
                    'body': post_data["body"],
                    'updated': datetime.utcnow()
                }
            })
        response_object['message'] = 'post updated!'
    if request.method == 'DELETE':
        result = remove_post(post_id)
        if result:
            response_object['message'] = 'post removed!'
        else:
            # FIX: 'resuponse_object' typo raised NameError on this path.
            response_object['message'] = 'post not removed!'
    return jsonify(response_object)
def test_data():
    """Feed every mpi_vectors row through the MPI insert generator."""
    sql = "SELECT * FROM mpi_vectors"
    frame = pd.read_sql_query(sql, get_db())
    return gen_mpi_insert(frame)
def get_game_board():
    """Fetch the (single) board row and convert it to a dict."""
    cursor = db.get_db().cursor()
    res = cursor.execute("SELECT * FROM board").fetchone()
    print("###### " + str(res))
    return db.board_row_to_dict(res)
def get_current_id():
    """Return the id of the player whose turn it currently is."""
    cursor = db.get_db().cursor()
    res = cursor.execute("SELECT * FROM board").fetchone()
    print(res)
    return db.board_row_to_dict(res)["cur_player_id"]
def user_query():
    """Return every row of the joininfos table."""
    cursor = get_db().cursor()
    sql_all = "select * from joininfos;"
    cursor.execute(sql_all)
    return cursor.fetchall()
def validate():
    """Main validation view.

    Reads the dataset and user feedback, re-runs constraint discovery and
    fault detection, clusters the suspicious records (SOM or k-prototypes),
    tags the clusters in the database, and renders the validate page with
    per-cluster score graphs and decision-tree interpretations.

    NOTE(review): indentation reconstructed from a collapsed source —
    confirm block boundaries (especially what runs only on POST) against
    the original file.
    """
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    #@@@@@@@@@@@@@@@@@Initializations@@@@@@@@@@@@@@@@@
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    datasetId = request.args.get('datasetId')
    db = get_db()
    constraintDiscoveryMethod = request.args.get('constraintDiscoveryMethod')
    interpretationMethod = request.args.get('interpretationMethod')
    clusteringMethod = request.args.get('clusteringMethod')
    dQTestToolHelper = DQTestToolHelper()
    dataCollection = DataCollection()
    testing = Testing()
    #
    numberOfSuspiciousDataFrame = pd.read_sql(sql="select count(*) from dataRecords_" + datasetId + " where status like 'suspicious%'", con=db)
    numberOfSuspicious = numberOfSuspiciousDataFrame[numberOfSuspiciousDataFrame.columns.values[0]].values[0]
    suspiciousDataFrame = pd.read_sql(sql="select * from dataRecords_" + datasetId + " where status like 'suspicious%'", con=db)
    dataFrame = pd.read_sql(sql="SELECT * FROM dataRecords_" + datasetId, con=db)
    AFdataFrameOld = pd.DataFrame(columns=[dataFrame.columns.values[0]])
    #
    if request.method == "POST":
        #select actual faluts from previous run before updating the database - we need this information to measure the false negative rate
        AFdataFrameOld = pd.read_sql(sql="select distinct " + dataFrame.columns.values[0] + " from actualFaults_" + datasetId, con=db)
        if request.form.get('evaluation'):
            return redirect(url_for('DQTestTool.evaluation', datasetId=datasetId))
        #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
        #@@@@@@@@@@@@@@Incorporate domain knowledge@@@@@@@@
        #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
        numberOfClusters = request.form["numberOfClusters"]
        #maxInvalidityScoreOfNormalData=[]
        if numberOfClusters:
            # Clusters the user ticked become confirmed faults; the rest revert to valid.
            for i in range(int(numberOfClusters)):
                if str(i) in request.form.getlist('Group'):
                    db.execute("Update dataRecords_" + datasetId + " set status='actualFaults_" + str(i) + "' where status='suspicious_" + str(i) + "'")
                else:
                    db.execute("Update dataRecords_" + datasetId + " set status='valid' where status='suspicious_" + str(i) + "'")
                    #db.execute("Update dataRecords_"+datasetId+" set status='clean' where status='suspicious_"+str(i)+"'")
    faultyRecordFrame, normalRecordFrame, invalidityScoresPerFeature, invalidityScores, faultyThreshold, bestModelFileName = dQTestToolHelper.constraintDiscoveryAndFaultDetection(db, datasetId, dataFrame, constraintDiscoveryMethod, AFdataFrameOld, suspiciousDataFrame)
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    #@@@@@@@@Cluster suspicious records@@@@@@@@@@
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    #If you want to work with data directly for clustering, use faultyRecordFrame directly. Now it clusters based on invelidity score per feature
    dataFrames = []
    if clusteringMethod == "som":
        #Detect faulty records based on invalidity scores
        faultyInvalidityScoreFrame = testing.detectFaultyRecords(invalidityScoresPerFeature, invalidityScores, faultyThreshold)  #,statistics.mean(invalidityScores))#,np.percentile(invalidityScores,0.5))
        faultyInvalidityScoreFrame.columns = dataFrame.columns.values[:-1]
        som = SOM(5, 5, len(faultyInvalidityScoreFrame.columns.values) - 1, 400)
        dataFrames = som.clusterFaultyRecords(faultyInvalidityScoreFrame.drop([faultyInvalidityScoreFrame.columns.values[0]], axis=1), faultyRecordFrame)
    elif clusteringMethod == "kprototypes":
        faultyRecordFramePreprocessed = dataCollection.preprocess(faultyRecordFrame)
        kmeans = H2oKmeans()
        """bestClusteringModel=kmeans.tuneAndTrain(faultyInvalidityScoreFrame.drop([faultyInvalidityScoreFrame.columns.values[0],'invalidityScore'],axis=1)) dataFrames=kmeans.clusterFaultyRecords(bestClusteringModel,faultyInvalidityScoreFrame.drop([faultyInvalidityScoreFrame.columns.values[0],'invalidityScore'],axis=1), faultyRecordFrame)"""
        bestClusteringModel = kmeans.tuneAndTrain(faultyRecordFramePreprocessed.drop([faultyRecordFramePreprocessed.columns.values[0]], axis=1))
        dataFrames = kmeans.clusterFaultyRecords(bestClusteringModel, faultyRecordFramePreprocessed.drop([faultyRecordFramePreprocessed.columns.values[0]], axis=1), faultyRecordFrame)
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    #Update status of suspicious groups in database@
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    db.execute("Update dataRecords_" + datasetId + " set status='invalid' where status like 'actual%' ")
    i = 0
    for dataFrame in dataFrames:
        # Stage each cluster in a temp table, tag its rows, then drop the table.
        dataFrame.to_sql('suspicious_i_temp_' + datasetId, con=db, if_exists='replace', index=False)
        db.execute("Update dataRecords_" + datasetId + " set status='suspicious_" + str(i) + "' where " + dataFrame.columns.values[0] + " in (select " + dataFrame.columns.values[0] + " from suspicious_i_temp_" + datasetId + ")")
        db.execute("Drop table suspicious_i_temp_" + datasetId)
        i = i + 1
    numberOfClusters = i
    faulty_records_html = []
    cluster_scores_fig_url = []
    cluster_dt_url = []
    cluster_interpretation = []
    treeRules = []
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    #@@@@@@@@@@ Add interpretations to groups@@@@
    #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    #show the suspicious groups as HTML tables
    for i in range(int(numberOfClusters)):
        faulty_records = dataFrames[i]
        faulty_records_html.append(faulty_records.drop(['status'], axis=1).to_html())
        faulty_attributes = dataFrame.columns.values[1:-2]
        if constraintDiscoveryMethod == "H2O_Autoencoder":
            cluster_scores = invalidityScoresPerFeature.loc[invalidityScoresPerFeature[dataFrame.columns.values[0]].isin(faulty_records[dataFrame.columns.values[0]])]
            #X=dataFrame.columns.values[1:-2]
            X = dataFrame.columns.values[1:-1]
            Y = cluster_scores.mean().tolist()[1:]
            cluster_scores_fig_url.append(dataCollection.build_graph(X, Y))
            #indicate the attributes with high invalidity score values
            faulty_attributes_indexes = [i for i, v in enumerate(Y) if v > np.percentile(Y, 70)]
            faulty_attributes = X[faulty_attributes_indexes]
        #Add decision trees
        normalRecordFrame['label'] = 'valid'
        faulty_records['label'] = 'suspicious'
        decisionTreeTrainingFrame = pd.concat([normalRecordFrame, faulty_records])
        decisionTreeTrainingFramePreprocessed = dataCollection.preprocess(decisionTreeTrainingFrame)
        # Pick the interpretation model; H2O gradient boosting is the default.
        tree = H2oGradientBoosting()
        if interpretationMethod == "Sklearn Decision Tree":
            tree = SklearnDecisionTree()
        if interpretationMethod == "Sklearn Random Forest":
            tree = SklearnRandomForest()
        if interpretationMethod == "H2o Random Forest":
            tree = H2oRandomForest()
        treeModel = tree.train(decisionTreeTrainingFramePreprocessed, faulty_attributes, 'label')
        numberOfTrees = 3
        decisionTreeImageUrls = []
        for i in range(numberOfTrees):
            decisionTreeImageUrls.append(tree.visualize(treeModel, faulty_attributes, ['valid', 'suspicious'], tree_id=i))
        cluster_dt_url.append(decisionTreeImageUrls)
        treeCodeLines = tree.treeToCode(treeModel, faulty_attributes)
        treeRules.append(tree.treeToRules(treeModel, faulty_attributes))
        cluster_interpretation.append(tree.interpret(treeCodeLines))
    return render_template('validate.html', data='@'.join(faulty_records_html), datasetId=datasetId, numberOfClusters=numberOfClusters, fig_urls=cluster_scores_fig_url, cluster_dt_url=cluster_dt_url, cluster_interpretation=cluster_interpretation, treeRules=treeRules, bestModelFile='/static/model/' + bestModelFileName)
def get_player_count():
    """Count the rows in the players table."""
    cursor = db.get_db().cursor()
    row = cursor.execute("SELECT COUNT(*) FROM players").fetchone()
    return row[0]
def delete_payments():
    """Remove every payee and payment record, then release the connection."""
    conn = get_db()
    conn.execute('DELETE FROM payees')
    conn.execute('DELETE FROM payments')
    conn.commit()
    close_db()
def create(id_, name, email):
    """Insert a user document with the given id, name and email."""
    document = {"_id": id_, "name": name, "email": email}
    get_db().get_collection('Users').insert_one(document)
seasontourney.loc[seasontourney['TourneyWin'] == 2, 'TourneyResultStr'] = 'Sweet 16' seasontourney.loc[seasontourney['TourneyWin'] == 1, 'TourneyResultStr'] = 'Rnd of 32' seasontourney.loc[seasontourney['TourneyWin'] == 0, 'TourneyResultStr'] = 'Rnd of 64' seasonteams_out = pd.merge(left=seasonteams_out, right=seasontourney, how='left', on=['Season', 'TmID']) timer.split('Pre-write: ') # Initialize DB connection db = get_db() # Read in data, convert to dict, insert records into collection db.games.drop() ''' data = rsg_out.to_dict('records') timer.split('Post-dict of rsg dataframe') db.games.insert_many(data,ordered=False) timer.split('Post-insert of rsg dataframe') print('Inserted {} records into database'.format(len(data))) ''' db.seasonteams.drop() data = seasonteams_out.to_dict('records') timer.split('Post-dict of seasonteams dataframe')
def delete_payment(payment_id):
    """Delete one payment and its payee rows, then release the connection."""
    conn = get_db()
    conn.execute('DELETE FROM payees WHERE payment_id = ?', (payment_id, ))
    conn.execute('DELETE FROM payments WHERE id = ?', (payment_id, ))
    conn.commit()
    close_db()
def get_tokenized_sentences_count_status(self, record_id, bleu_return):
    """Aggregate translation progress for one record from MongoDB.

    Returns a dict of total/completed sentence and word counts, an average
    BLEU score (0 when bleu_return is falsy or no saved pairs exist) and
    total time spent in ms. On any exception, logs it and returns zeros.
    """
    try:
        collections = get_db()[DB_SCHEMA_NAME]
        avg_bleu_score = 0
        if bleu_return:
            # Collect machine (s0_tgt) vs. user target sentences for the
            # saved tokenized sentences of this record.
            # NOTE(review): the "tgt_user" projection value "******" looks
            # redacted/placeholder — confirm the real field path upstream.
            target_docs = collections.aggregate([
                {'$match': {'$and': [{"record_id": record_id}, {'data_type': 'text_blocks'}]}},
                {'$unwind': "$data.tokenized_sentences"},
                {'$match': {"data.tokenized_sentences.save": True}},
                {"$project": {"tgt_nmt": "$data.tokenized_sentences.s0_tgt", "tgt_user": "******", "_id": 0}}])
            tgt_nmt = []
            tgt_user = []
            for doc in target_docs:
                tgt_nmt.append(doc["tgt_nmt"])
                tgt_user.append(doc["tgt_user"])
            if tgt_nmt and tgt_user:
                # sacrebleu wants predictions plus a list of reference lists.
                preds = tgt_nmt
                refs = [tgt_user]
                sacre_bleu = sacrebleu.corpus_bleu(preds, refs).score
                log_info("\n*************************\nBleu score calculation", AppContext.getContext())
                # log_info("\n**Machine translated sentences:{}\n **User translated sentences:{}".format(preds, refs), AppContext.getContext())
                log_info("\nSACRE_BLEU value** :{}".format(sacre_bleu), AppContext.getContext())
                log_info("\n*****************************", AppContext.getContext())
                # Normalize the 0-100 corpus score to a 0-1 value, 2 decimals.
                avg_bleu_score = round((sacre_bleu/100), 2)
        # Group sentences by their 'save' flag (True/False/field absent) and
        # count sentences, whitespace-split source words, and time spent.
        docs = collections.aggregate([
            {'$match': {'$and': [{"record_id": record_id}, {'data_type': 'text_blocks'}]}},
            {'$unwind': "$data.tokenized_sentences"},
            {"$addFields": {"data.tokenized_sentences.words": {"$split": ["$data.tokenized_sentences.src", " "]}}},
            {"$addFields": {"sent_wrd_count": {"$size": "$data.tokenized_sentences.words"}}},
            {"$group": {
                "_id": "$data.tokenized_sentences.save",
                "doc_sent_count": {"$sum": 1},
                "doc_wrd_count": {"$sum": "$sent_wrd_count"},
                "total_time_spent": {"$sum": "$data.tokenized_sentences.time_spent_ms"}
            }}
        ])
        # "total_bleu_score":{"$sum": "$data.tokenized_sentences.bleu_score"},
        empty_sent_count = 0
        saved_sent_count = 0
        unsaved_sent_count = 0
        empty_wrd_count = 0
        saved_wrd_count = 0
        unsaved_wrd_count = 0
        total_saved_bleu_score = 0
        total_time_spent_ms = 0
        # _id is the 'save' flag: None (missing), True (saved), False (unsaved).
        for doc in docs:
            if doc['_id'] == None:
                empty_sent_count = doc['doc_sent_count']
                empty_wrd_count = doc['doc_wrd_count']
            if doc['_id'] == True:
                saved_sent_count = doc['doc_sent_count']
                saved_wrd_count = doc['doc_wrd_count']
                # Time is only tracked for saved sentences.
                total_time_spent_ms = doc["total_time_spent"]
            if doc['_id'] == False:
                unsaved_sent_count = doc['doc_sent_count']
                unsaved_wrd_count = doc['doc_wrd_count']
        return {
            'total_sentences': empty_sent_count + saved_sent_count + unsaved_sent_count,
            'completed_sentences': saved_sent_count,
            'total_words': empty_wrd_count + saved_wrd_count + unsaved_wrd_count,
            'completed_words': saved_wrd_count,
            'avg_bleu_score': avg_bleu_score,
            'total_time_spent_ms': total_time_spent_ms
        }
    except Exception as e:
        # Best-effort endpoint: report zeros rather than propagate DB errors.
        log_exception("db connection exception ", AppContext.getContext(), e)
        return {
            'total_sentences': 0,
            'completed_sentences': 0,
            'total_words': 0,
            'completed_words': 0,
            'avg_bleu_score': 0,
            'total_time_spent_ms': 0
        }
def __init__(self, res_id):
    """Normalize the resource id to a string and open the clients collection."""
    normalized_id = str(res_id)
    self.res_id = normalized_id
    self.id_length = len(normalized_id)
    self.client_collection = get_db()[CLIENTS_COLLECTION]
def slett_notat(id):
    """Delete the note with the given id, then return to the front page."""
    connection = db.get_db()
    delete_notat(connection, id)
    return redirect('/')
def delete(id):
    """Remove a post by id and redirect to the blog index."""
    # Presumably aborts (404) when the post does not exist — confirm
    # against get_post's definition elsewhere in this module.
    get_post(id)
    connection = get_db()
    connection.execute('DELETE FROM post WHERE id = ?', (id, ))
    connection.commit()
    return redirect(url_for('blog.index'))
def delete_block(bid):
    """Delete the block row with id *bid* and return an empty 204 response.

    Fix: the original interpolated bid into the SQL text with '%d' % bid.
    Use a bound parameter instead — the driver handles quoting, and the
    statement text stays constant. The connection-level execute/commit
    pattern here matches the sqlite3 API, hence the '?' placeholder.
    """
    database = db.get_db()
    database.execute('DELETE FROM blocks WHERE id = ?', (bid,))
    database.commit()
    return ('', 204)
from kivy.lang import Builder from kivy.factory import Factory from kivy.uix.boxlayout import BoxLayout from kivy.uix.screenmanager import Screen from kivymd.dialog import MDDialog from kivymd.label import MDLabel from kivymd.theming import ThemeManager from kivy.metrics import dp from kivymd.bottomsheet import MDGridBottomSheet from kivy.clock import Clock Clock.max_iteration = 60 import threading import sqlite3 import db stat = db.get_db('all') Builder.load_string(""" #:import Toolbar kivymd.toolbar.Toolbar #:import ThemeManager kivymd.theming.ThemeManager #:import C kivy.utils.get_color_from_hex #:import Toolbar kivymd.toolbar.Toolbar <sponsers> name:'spons' BoxLayout: orientation:'vertical' Toolbar: id: toolbar title: 'Sponsors' md_bg_color: app.theme_cls.primary_color background_palette: 'Primary'
def __init__(self):
    """Start with empty question/answer slots and acquire a DB handle."""
    for attr in ('zh_question', 'en_question', 'new_zh_question', 'answer'):
        setattr(self, attr, None)
    self.db = get_db()
def notater():
    """Render the index page with every note from the database."""
    alle_notater = get_notater(db.get_db())
    return render_template('index.html', notater=alle_notater)
def cng_screen1(self, non, pro=0):
    """Switch the screen manager to a profile/contact/schedule screen
    (pro == 1, non is a string key) or to an event screen (pro == 0,
    non is a 1-based event number), lazily creating screens on first use.

    NOTE(review): comparisons use bitwise '&' instead of 'and'; with bool
    operands this behaves the same but lacks short-circuiting — confirm
    intentional.
    """
    if (pro == 1):
        # Profile/contact/schedule screens: prof_con[i] remembers whether
        # each screen widget has already been constructed and added.
        if ((non == "prof1") & (not self.prof_con[0])):
            print('loading')
            self.prof_con[0] = True
            self.main_widget.ids.scr_mngr.add_widget(prof.prof1())
        if ((non == "prof2") & (not self.prof_con[1])):
            self.prof_con[1] = True
            self.main_widget.ids.scr_mngr.add_widget(prof.prof2())
        if ((non == "prof3") & (not self.prof_con[2])):
            self.prof_con[2] = True
            self.main_widget.ids.scr_mngr.add_widget(prof.prof3())
        if ((non == "cont") & (not self.prof_con[3])):
            self.prof_con[3] = True
            self.main_widget.ids.scr_mngr.add_widget(events.contact())
        if ((non == "shed") & (not self.prof_con[4])):
            # Schedule screen construction can fail (presumably it fetches
            # remote data); show a popup and bail out instead of switching.
            try:
                self.main_widget.ids.scr_mngr.add_widget(listest.lis())
            except Exception:
                self.dialog0 = pop.conf_pop(
                    'no Internet Connection', 'Dismiss',
                    lambda x: self.dialog0.dismiss())
                self.dialog0.open()
                self.dialoga.dismiss()
                return
            self.prof_con[4] = True
        # dialoga is a loading dialog that may or may not be open.
        try:
            self.dialoga.dismiss()
        except Exception:
            pass
        self.main_widget.ids.scr_mngr.current = non
        return
    # Event path: create the 'showN' screen on first visit.
    if not self.main_widget.ids.scr_mngr.has_screen('show' + str(non)):
        if (non == 1):
            self.main_widget.ids.scr_mngr.add_widget(events.show1())
            self.event_scr_effects[non - 1] = 0
        if (non == 2):
            self.main_widget.ids.scr_mngr.add_widget(events.show2())
            self.event_scr_effects[non - 1] = 0
        if (non == 3):
            self.main_widget.ids.scr_mngr.add_widget(events.show3())
            self.event_scr_effects[non - 1] = 0
        if (non == 4):
            self.main_widget.ids.scr_mngr.add_widget(events.show4())
            self.event_scr_effects[non - 1] = 0
        if (non == 5):
            self.main_widget.ids.scr_mngr.add_widget(events.show5())
            self.event_scr_effects[non - 1] = 0
        if (non == 6):
            self.main_widget.ids.scr_mngr.add_widget(events.show6())
            self.event_scr_effects[non - 1] = 0
        self.path_back.append('event')
        try:
            self.dialoga.dismiss()
        except Exception:
            pass
        self.main_widget.ids.scr_mngr.current = 'show' + str(non)
        return
    # Screen exists and needs no refresh: just switch to it.
    if self.event_scr_effects[non - 1] == 0:
        self.path_back.append('event')
        try:
            self.dialoga.dismiss()
        except Exception:
            pass
        self.main_widget.ids.scr_mngr.current = 'show' + str(non)
        return
    # Screen exists but is flagged for refresh: re-read the registration
    # state for this event and recolor the enroll button accordingly.
    posta = int(db.get_db(non))
    if (posta == 1):
        nor = 'registered'
        nocolor = self.theme_cls.green_color
    else:
        nor = 'enroll'
        nocolor = self.theme_cls.primary_color
    self.main_widget.ids.scr_mngr.get_screen('show' + str(non)).ids.but.text = nor
    self.main_widget.ids.scr_mngr.get_screen(
        'show' + str(non)).ids.but.md_bg_color = nocolor
    self.event_scr_effects[non - 1] = 0
    self.path_back.append('event')
    try:
        self.dialoga.dismiss()
    except Exception:
        pass
    self.main_widget.ids.scr_mngr.current = 'show' + str(non)
def test_get_db():
    """The database factory must never hand back None."""
    handle = get_db()
    assert handle is not None
def apiMain():
    """Root endpoint: log the DB handle and return a placeholder message."""
    handle = get_db()
    print(handle)
    return 'Web docs soon, read code or md file for now.'
def check_login(self):
    """Resolve the session user and their effective access level.

    Returns False when no user is logged in; otherwise populates g.user
    and g.user_status (walking expired 'decomission' chains down to the
    'open' level) and returns True.

    NOTE(review): several queries read "FROM user" while the rest use
    "FROM users" — looks like a typo that would raise at runtime on those
    branches; confirm the actual table name.
    NOTE(review): some UPDATE/SELECT statements are built with str.format
    interpolation of user_id / g.user_status rather than bound parameters;
    user_id comes from the session so the risk depends on how it is set —
    worth parameterizing.
    NOTE(review): 'equivelent_to' is spelled that way in the schema queries;
    presumably it matches the actual column name.
    """
    db, _ = get_db()
    user_id = session.get('user_id')
    if user_id is None:
        return False
    else:
        g.user = db.execute("SELECT * FROM users WHERE id = ?", (user_id, )).fetchone()
        g.user_status = db.execute("SELECT * FROM accessLevels WHERE id = ?", (g.user['access_level_id'],)).fetchone()
        # Per-user decommission: if the user's own access expired, move them
        # either to the configured follow-up level or back to 'open'.
        if not g.user['access_decomission_on'] is None:
            if datetime.utcnow() >= datetime.strptime(g.user['access_decomission_on'], '%Y-%m-%d %H:%M:%S'):
                if g.user['access_decomission_to'] is None:
                    query = "UPDATE users SET access_level_id = {aid} WHERE id = {id}".format(
                        aid = "(SELECT id FROM accessLevels WHERE access_level = 'open')",
                        id = user_id
                    )
                    db.execute(query)
                    db.commit()
                    g.user_status = 'open'
                    g.user = db.execute("SELECT * FROM user WHERE id = ?", (user_id, )).fetchone()
                else:
                    query = "UPDATE users SET access_level_id = ? WHERE id = ? "
                    db.execute(query,(g.user['access_decomission_to'], user_id))
                    db.commit()
                    g.user_status = db.execute("SELECT access_level FROM accessLevels WHERE id = ?", (g.user['access_decomission_to'],)).fetchone()
                    g.user = db.execute("SELECT * FROM user WHERE id = ?", (user_id,)).fetchone()
        # Level-wide decommission: walk decomission_to links until a level
        # that is still valid (or 'open') is found.
        if g.user_status != 'open' and not g.user_status['decomission_on'] is None:
            if datetime.utcnow() >= datetime.strptime(g.user_status['decomission_on'], '%Y-%m-%d %H:%M:%S'):
                non_decomissioned = False
                while not non_decomissioned:
                    if g.user_status['decomission_to'] is None:
                        db.execute("UPDATE users SET access_level_id = {ali} WHERE id = {id}".format(
                            ali = "(SELECT id From accessLevels WHERE access_level = 'open')",
                            id = user_id
                        ))
                        db.commit()
                        g.user_status = 'open'
                        non_decomissioned = True
                    else:
                        g.user_status = db.execute('SELECT * FROM accessLevels WHERE id = ?', (g.user_status['decomission_to'],)).fetchone()
                        if not g.user_status['decomission_on'] is None:
                            # Follow-up level still valid: adopt it and stop.
                            # (An expired follow-up level loops again.)
                            if datetime.utcnow() < datetime.strptime(g.user_status['decomission_on'], '%Y-%m-%d %H:%M:%S'):
                                db.execute("UPDATE users SET access_level_id = ? WHERE id = ?", (g.user_status['id'], user_id))
                                db.commit()
                                g.user_status = g.user_status['access_level']
                                g.user = db.execute("SELECT * FROM users WHERE id = ?", (user_id,)).fetchone()
                                non_decomissioned = True
                        else:
                            # Follow-up level never expires: adopt it and stop.
                            db.execute("UPDATE users SET access_level_id = ? WHERE id = ?", (g.user_status['id'],user_id))
                            db.commit()
                            g.user = db.execute("SELECT * FROM users WHERE id = ?",(user_id,)).fetchone()
                            g.user_status = g.user_status['access_level']
                            non_decomissioned = True
        elif g.user_status != 'open' and g.user_status['decomission_on'] is None:
            db.execute("UPDATE users SET access_level_id = ? WHERE id = ?", (g.user_status['id'],user_id))
            db.commit()
            g.user = db.execute("SELECT * FROM users WHERE id = ?",(user_id,)).fetchone()
            g.user_status = g.user_status['access_level']
        elif g.user_status is None:
            # Unknown access level id on the user row: reset to 'open'.
            query = "UPDATE users SET access_level_id = {aid} WHERE id = {id}".format(
                aid = "(SELECT id FROM accessLevels WHERE access_level = 'open')",
                id = user_id
            )
            db.execute(query)
            db.commit()
            # NOTE(review): missing .fetchone() here — g.user becomes a
            # cursor, unlike every other branch; confirm.
            g.user = db.execute("SELECT * FROM users WHERE id = ?",(user_id,))
            g.user_status = 'open'
        # Custom (non-built-in) levels map onto a built-in equivalent.
        if g.user_status != 'open' and g.user_status != 'restricted' and g.user_status != 'owner' and g.user_status != 'admin' and g.user_status != 'deactivated':
            query = "SELECT id, access_level FROM accessLevels WHERE id = {id}".format(
                id = "(SELECT equivelent_to FROM accessLevels WHERE access_level = '{ac}')".format(
                    ac = g.user_status
                )
            )
            equivalence = db.execute(query).fetchone()
            if equivalence is None:
                g.user_status = 'open'
                # NOTE(review): this fallback query is built but never
                # executed or committed; confirm whether that is intended.
                query = "UPDATE accessLevels SET equivelent_to = {eq} WHERE id = {id}".format(
                    eq = "(SELECT id FROM accessLevels WHERE access_level = 'open')",
                    id = user_id
                )
            else:
                g.user_status = equivalence['access_level']
        return True
def user_loader(username):
    """Look up a user record by username for the login manager."""
    connection = db.get_db()
    return get_user(connection, username)
def test_insert_and_get_db(data):
    """Insert *data* and verify the store hands the same value back.

    Fix: the original returned the comparison result, which a test runner
    like pytest ignores — a failing round-trip would silently pass. Assert
    the round-trip, and keep the boolean return for any direct caller.
    """
    db.insert(data)
    result = db.get_db()
    assert result == data, 'round-trip mismatch: %r != %r' % (result, data)
    return result == data
def __enter__(self):
    """Enter the context: wrap a fresh DB handle with this resource id."""
    handle = get_db()
    return WrappedDatabase(handle, self.res_id)
def get_payment(payment_id):
    """Fetch one payment row by id, expand it in place, and return the dict.

    Fix: the original built and expanded the payment dict but never
    returned it, so every caller received None.

    Raises TypeError (via dict(None)) when no row matches payment_id —
    preserved from the original behavior.
    """
    conn = get_db()
    result = conn.execute('SELECT * FROM payments WHERE id = ?', (payment_id, ))
    payment = dict(result.fetchone())
    expand_payment(payment)
    close_db()
    return payment