def get_history_for(word):
    '''Given a word, get how many times it appeared on each frontpage of
    each publication followed.'''
    # This is far too tiresome to do with the ORM.
    # So... let's do it the old way!
    query = """select pubs.pubdate, p.name, coalesce(cnt.count, 0)
        from (select c.count,
                     to_char(f.time_of_publication, 'YYYY/MM/DD') as pdate,
                     p.id
              from wordcount c
              left join frontpage f on f.id = c.frontpage_id
              left join publication p on f.publication_id = p.id
              left join word w on c.word_id = w.id
              where word = :x) cnt
        full outer join (select distinct
                                to_char(f.time_of_publication, 'YYYY/MM/DD') as pubdate,
                                p.id
                         from frontpage f
                         cross join publication p) pubs
            on cnt.id = pubs.id and cnt.pdate = pubs.pubdate
        left outer join publication p on p.id = pubs.id;"""
    return to_stats(
        get_engine().execute(text(query), x=word).fetchall(), "date")
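# A minimal usage sketch for get_history_for, assuming the surrounding module
# provides get_engine, text and to_stats as used above; 'election' is an
# illustrative word, not taken from the original code.
stats = get_history_for('election')
# to_stats groups the rows by the "date" key, so each entry carries the
# publication date, the publication name and the count for the word.
print(stats)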
def delete_item(tablename):
    con = database.get_engine().connect()
    data = request.values.get('key')
    print(data, file=sys.stderr)
    if data != '':
        if tablename == 'Take_Course':
            # Take_Course has a composite key, e.g. 'a=1&b=2'
            data = data.split('&')
            temp = []
            temp.append(data[0].split('=')[1])
            temp.append(data[1].split('=')[1])
            data = temp
        else:
            data = data.split('=')[1]
        database.delete_data(tablename, data)
    ## Execute select all from the requested table
    table = database.get_table(tablename)
    table = table.select().execute().fetchall()
    table_name = tablename
    ## Parse result data
    columns, results = parse_result(table)
    con.close()
    return render_template('delete.html', table_name=table_name,
                           columns=columns, results=results)
def get_algorithms(current_skyline_app):
    """
    Returns a dict of algorithms and their ids.
    """
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    algorithms = {}
    # Initialise engine so the checks below cannot raise NameError if
    # get_engine itself raises
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
        if fail_msg != 'got MySQL engine':
            current_logger.error('error :: get_algorithms :: could not get a MySQL engine fail_msg - %s' % str(fail_msg))
        if trace != 'none':
            current_logger.error('error :: get_algorithms :: could not get a MySQL engine trace - %s' % str(trace))
    except Exception as err:
        current_logger.error(traceback.format_exc())
        current_logger.error('error :: get_algorithms :: could not get a MySQL engine - %s' % str(err))

    algorithms_list = []
    if engine:
        try:
            connection = engine.connect()
            stmt = 'SELECT DISTINCT(algorithm) FROM algorithms'
            result = connection.execute(stmt)
            for row in result:
                algorithms_list.append(row['algorithm'])
            connection.close()
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_algorithms :: failed to build algorithms_list - %s' % str(err))
    if algorithms_list:
        try:
            connection = engine.connect()
            for algorithm in algorithms_list:
                stmt = 'SELECT id FROM algorithms WHERE algorithm=\'%s\'' % algorithm
                result = connection.execute(stmt)
                for row in result:
                    algorithms[algorithm] = row['id']
                    break
            connection.close()
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_algorithms :: failed to build algorithms - %s' % str(err))
    if engine:
        engine_disposal(current_skyline_app, engine)
    return algorithms
def restart_autoincrement(model):
    """Restarts autoincrement counter"""
    engine = get_engine(model.__bind_key__)
    db.session.close()
    if engine.dialect.name == 'sqlite':
        warn(UserWarning('Sqlite increment reset is not supported'))
        return
    engine.execute(
        'ALTER TABLE ' + model.__tablename__ + ' AUTO_INCREMENT = 1;')
def get_an_engine():
    try:
        engine, log_msg, trace = get_engine(skyline_app)
        return engine, log_msg, trace
    except:
        # trace must be assigned here; it is only unpacked on success
        trace = traceback.format_exc()
        logger.error(trace)
        log_msg = 'error :: failed to get MySQL engine in spin_process'
        logger.error(log_msg)
        return None, log_msg, trace
def textarea_submit():
    status = ''
    # Get text from the request.
    request_code = request.form.get('request-text')
    # Ensure the text is not empty.
    if request_code != '':
        # Check whether an invalid operator exists.
        request_code = string_process(request_code)
        # An invalid operator exists; return a warning.
        if request_code == 0:
            return render_template(
                'textarea.html',
                status={'warn': status_code['operator_warn']},
                user_input='Nope')
        # Otherwise, send the code to the database.
        else:
            try:
                engine = database.get_engine()
                cursor = engine.execute(request_code).fetchall()
                # Check that the result is not empty.
                if cursor:
                    # Parse result
                    columns, results = parse_result(cursor)
                    # Success; return the result.
                    return render_template(
                        'textarea.html',
                        status={'success': status_code['success']},
                        user_input=request_code,
                        columns=columns, results=results)
                # The result is empty; show a status message.
                else:
                    return render_template(
                        'textarea.html',
                        status={'success': status_code['no_result']},
                        user_input=request_code)
            # An error occurred; return an error message.
            except Exception as ex:
                return render_template(
                    'textarea.html',
                    status={'error': status_code['error']},
                    user_input=request_code,
                    error_message=ex)
    return render_template('textarea.html',
                           status={'warn': status_code['empty_warn']})
def upgrade_rtd(self):
    """
    Pull data from the database that is not yet in the local cache.
    This function seems to work but is not properly tested.
    """
    rtd = self.load_data()
    len_beginning = len(rtd)
    print('Rows before update:', len_beginning)
    max_date = rtd['ar_pt'].max().compute() - datetime.timedelta(days=2)
    max_date = max_date.to_pydatetime()
    print('getting data added since', max_date)

    from sqlalchemy import Column, DateTime
    from sqlalchemy import sql
    from sqlalchemy.dialects import postgresql

    with get_engine().connect() as connection:
        query = sql.select([Column(c) for c in self.df_dict] + [Column('hash_id')])\
            .where((Column('ar_pt', DateTime) > str(max_date))
                   | (Column('dp_pt', DateTime) > str(max_date)))\
            .select_from(sql.table(Rtd.__tablename__))\
            .alias('new_rtd')
        view_query = 'CREATE OR REPLACE VIEW new_rtd AS {}'.format(
            str(query.compile(dialect=postgresql.dialect(),
                              compile_kwargs={"literal_binds": True})))
        connection.execute(view_query)
        new_rtd = dd.read_sql_table('new_rtd', DB_CONNECT_STRING,
                                    index_col='hash_id', meta=self.meta,
                                    npartitions=20)

    new_rtd.to_parquet(self.DATA_CACHE_PATH + '_new', engine='pyarrow',
                       schema='infer')
    new_rtd = dd.read_parquet(self.DATA_CACHE_PATH + '_new', engine='pyarrow')
    new_rtd = self._parse(new_rtd)
    new_rtd.to_parquet(self.DATA_CACHE_PATH + '_new', engine='pyarrow',
                       schema='infer')
    new_rtd = dd.read_parquet(self.DATA_CACHE_PATH + '_new', engine='pyarrow')

    # Remove changes from rtd that are also present in new_rtd
    rtd = rtd.loc[~rtd.index.isin(new_rtd.index.compute()), :]
    rtd = dd.concat([rtd, new_rtd], axis=0, ignore_index=False)
    # We need to recategorize here, as the categories might grow from int8
    # to int16 and then they need to be recalculated.
    rtd = self._categorize(rtd)
    rtd.to_parquet(self.DATA_CACHE_PATH, engine='pyarrow', schema='infer')

    rtd = self.load_data()
    self._save_encoders(rtd)

    len_end = len(rtd)
    print('Rows after getting new data:', len_end)
    print('Got', len_end - len_beginning, 'new rows')
    print('Number of duplicate indices:',
          rtd.index.compute().duplicated(keep='last').sum())
def layers_get_an_engine():
    try:
        engine, fail_msg, trace = get_engine(skyline_app)
        return engine, fail_msg, trace
    except:
        trace = traceback.format_exc()
        logger.error('%s' % trace)
        fail_msg = 'error :: layers :: get_an_engine :: failed to get MySQL engine'
        logger.error('%s' % fail_msg)
        return None, fail_msg, trace
def get_autoincrement(model):
    """Fetch autoincrement value from database.

    It is database-engine dependent, might not work well with some drivers.
    """
    engine = get_engine(model.__bind_key__)
    return engine.execute(
        'SELECT `AUTO_INCREMENT`'
        ' FROM INFORMATION_SCHEMA.TABLES'
        ' WHERE TABLE_SCHEMA = DATABASE()'
        ' AND TABLE_NAME = \'%s\';' % model.__tablename__
    ).scalar()
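# A minimal usage sketch for get_autoincrement. The query is MySQL/MariaDB
# specific (INFORMATION_SCHEMA with an `AUTO_INCREMENT` column), so it is
# only meaningful on those backends; `User` is a hypothetical model with the
# __bind_key__ and __tablename__ attributes the function expects.
next_id = get_autoincrement(User)
print('Next AUTO_INCREMENT value for', User.__tablename__, 'is', next_id)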
def get_an_engine():
    try:
        engine, fail_msg, trace = get_engine(skyline_app)
        return engine, fail_msg, trace
    except:
        trace = traceback.format_exc()
        logger.error('%s' % trace)
        fail_msg = 'error :: failed to get MySQL engine'
        logger.error('%s' % fail_msg)
        # return None, fail_msg, trace
        raise  # to webapp to return in the UI
def get_an_engine():
    try:
        engine, log_msg, trace = get_engine(skyline_app)
        return engine, log_msg, trace
    except Exception as e:
        trace = traceback.format_exc()
        logger.error(trace)
        log_msg = 'error :: cloudbursts :: find_related :: failed to get MySQL engine - %s' % e
        logger.error(log_msg)
        return None, log_msg, trace
def __init__(self) -> None:
    try:
        engine = get_engine()
        Base.metadata.create_all(engine)
        engine.dispose()
    except sqlalchemy.exc.OperationalError:
        print('database.plan running offline!')
    self.engine = None
    self.session = None
    self.queue = []
def fp_create_get_an_engine(current_skyline_app):
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
        return engine, fail_msg, trace
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: fp_create_get_an_engine :: failed to get MySQL engine'
        current_logger.error('%s' % fail_msg)
        return None, fail_msg, trace
def query_count(query, params=None):
    """
    Executes the query and returns the count of the number of rows that
    would be returned by this query. This will not actually return the
    results.

    :param query: the actual sql parameterized query to execute.
    :param params: the parameters to be used with the query
    :return: the total number of elements returned by the query. This is
        not limited by a page and/or pageSize
    """
    count_query = 'SELECT COUNT(*) FROM (' + query + ') AS a;'
    response = database.get_engine().execute(count_query, params)
    count = response.fetchone()
    response.close()
    return count[0]
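# A minimal usage sketch for query_count: because the query is wrapped in
# SELECT COUNT(*) FROM (...) AS a, it must be a complete SELECT statement.
# The table name and parameter are illustrative assumptions.
total = query_count('SELECT * FROM user WHERE active=%s', ['1'])
print('matching rows:', total)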
def get_table(tablename):
    con = database.get_engine().connect()
    table = database.get_table(tablename)
    table = table.select().execute().fetchall()
    table_name = tablename
    columns, results = parse_result(table)
    con.close()
    return render_template('index.html', table_name=table_name,
                           columns=columns, results=results)
def get_db_fp_timeseries(current_skyline_app, metric_id, fp_id):
    """
    Return a features profile timeseries from the database as a list
    """
    function_str = 'functions.database.queries.fp_timeseries'
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    timeseries = []
    # Initialise engine so the except block cannot raise NameError
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not get a MySQL engine - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return timeseries
    try:
        start_db_query = timer()
        metric_fp_ts_table = 'z_ts_%s' % str(metric_id)
        stmt = 'SELECT timestamp, value FROM %s WHERE fp_id=%s' % (
            metric_fp_ts_table, str(fp_id))
        connection = engine.connect()
        for row in engine.execute(stmt):
            fp_id_ts_timestamp = int(row['timestamp'])
            fp_id_ts_value = float(row['value'])
            if fp_id_ts_timestamp and fp_id_ts_value:
                timeseries.append([fp_id_ts_timestamp, fp_id_ts_value])
        connection.close()
        end_db_query = timer()
        current_logger.info(
            '%s :: determined %s values for the fp_id %s time series in %.6f seconds' % (
                function_str, str(len(timeseries)), str(fp_id),
                (end_db_query - start_db_query)))
    except Exception as e:
        current_logger.error(traceback.format_exc())
        current_logger.error(
            'error :: %s :: could not determine timestamps and values from %s - %s' % (
                function_str, metric_fp_ts_table, e))
    if engine:
        engine_disposal(current_skyline_app, engine)
    return timeseries
def delete():
    ## Get database engine
    con = database.get_engine().connect()
    ## Execute select all from Academy table
    table = database.academies.select().execute().fetchall()
    table_name = 'Academy'
    ## Parse result data
    columns, results = parse_result(table)
    con.close()
    return render_template('delete.html', table_name=table_name,
                           columns=columns, results=results)
def get_demo(demoname):
    engine = database.get_engine()
    table_name = demoname
    ## Get demo name by url
    demo_title = demo_query_title[demoname]
    demo_code = demo_query[demoname]
    demo_result = engine.execute(demo_code).fetchall()
    columns, results = parse_result(demo_result)
    return render_template('demo.html', table_name=table_name,
                           columns=columns, results=results,
                           demo_title=demo_title, demo_code=demo_code)
def query_request(tablename):
    con = database.get_engine().connect()
    query = request.args.get('text', '')
    if query != '':
        columns, result = database.query_execute(tablename, query)
        data = parse_query_result(columns, result)
        json_data = json.dumps({'columns': columns, 'results': data})
        con.close()
        return json_data
    else:
        table = database.get_table(tablename)
        table = table.select().execute().fetchall()
        table_name = tablename
        columns, data = parse_result(table)
        json_data = json.dumps({'columns': columns, 'results': data})
        # Close the connection in this branch too
        con.close()
        return json_data
def reset_relational_db(app, **kwargs):
    name = kwargs.get('bind', 'default')
    print('Removing', name, 'database...')
    engine = get_engine(name, app)
    set_foreign_key_checks(engine, active=False)
    db.session.commit()
    db.reflect()
    db.session.commit()
    meta = MetaData()
    meta.reflect(bind=engine)
    for table in reversed(meta.sorted_tables):
        engine.execute(table.delete())
    set_foreign_key_checks(engine, active=True)
    print('Removing', name, 'database completed.')
    print('Recreating', name, 'database...')
    db.create_all(**kwargs)
    print('Recreating', name, 'database completed.')
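# set_foreign_key_checks is called above but not defined in this section. A
# minimal sketch of what such a helper might look like, assuming a MySQL
# backend (SET FOREIGN_KEY_CHECKS is MySQL-specific); this is an assumption,
# not the project's actual implementation, hence the _sketch suffix.
def set_foreign_key_checks_sketch(engine, active=True):
    # Skip engines without FOREIGN_KEY_CHECKS support, e.g. SQLite.
    if engine.dialect.name != 'mysql':
        return
    engine.execute('SET FOREIGN_KEY_CHECKS = %s;' % (1 if active else 0))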
def upsert_rtd(rtd: pd.DataFrame):
    """
    Upsert dataframe to db using pangres

    Parameters
    ----------
    rtd : pd.DataFrame
        Data to upsert
    """
    if not rtd.empty:
        engine = get_engine()
        pangres.upsert(engine, rtd,
                       if_row_exists='update',
                       table_name=Rtd.__tablename__,
                       dtype=sql_types,
                       create_schema=False,
                       add_new_columns=False,
                       adapt_dtype_of_empty_db_columns=False)
        engine.dispose()
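# A minimal usage sketch for upsert_rtd: pangres upserts on the DataFrame
# index, and 'hash_id' is the index column used elsewhere in this section,
# so the frame is indexed accordingly. The column names and values are
# illustrative assumptions, not the real Rtd schema.
import pandas as pd

example = pd.DataFrame(
    {'ar_pt': [pd.Timestamp('2021-01-01 12:00')],
     'dp_pt': [pd.Timestamp('2021-01-01 12:05')]},
    index=pd.Index([123456789], name='hash_id'))
upsert_rtd(example)  # inserts the row, or updates it if hash_id already exists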
def test_has_motif(self):
    engine = get_engine('bio')
    load_regex_support(engine)

    p = Protein(refseq='NM_007', id=1, sequence='ABCDEFGHIJKLMNOPQRSTUVWXYZ')
    s = Site(position=3, residue='C', protein=p)
    db.session.add(s)
    db.session.commit()

    # Python side
    assert s.has_motif('.{7}C[DX].{6}')
    assert not s.has_motif('.{7}C[XY].{6}')

    # SQL side
    assert Site.query.filter(Site.has_motif('.{7}C[DX].{6}')).one()
    assert not Site.query.filter(Site.has_motif('.{7}C[XY].{6}')).all()
def query_result(query, params=None, pageSize=None, page=None):
    """
    Actually execute the query. This will use the pageSize and page values
    to limit the number of results that are returned.

    The query is assumed to be a parameterized query, and params contains
    the actual values. For example, to get a specific user we can use:

        SELECT * FROM user WHERE id=%s

    and add a value to params that will be safely inserted in the actual
    query.

    :param query: the actual sql parameterized query to execute.
    :param params: the parameters to be used with the query
    :param pageSize: the maximum number of elements to be returned (adds a
        LIMIT pageSize)
    :param page: which page to return, 0 based. (adds an OFFSET
        page * pageSize)
    :return: the results of the query.
    """
    if not params:
        params = []
    if not pageSize:
        pageSize = DEFAULT_PAGE_SIZE
    query += ' LIMIT %s'
    params.append(pageSize)
    if page:
        query += ' OFFSET %s'
        params.append(pageSize * page)
    return database.get_engine().execute(query, params)
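# A minimal pagination sketch combining query_count and query_result. Note
# that query_result appends to the params list it is given, so each call
# gets a fresh list here. The table, column and values are illustrative.
base_query = 'SELECT * FROM user WHERE active=%s'
total = query_count(base_query, ['1'])  # total row count, unpaginated
first_page = query_result(base_query, ['1'], pageSize=25, page=0)
print(total, first_page.fetchall())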
def main():
    # GET DATABASE SESSION AND ENGINE
    engine = database.get_engine()
    session = database.get_session()

    # SET VARIABLES
    dataList = getInfo()
    HOST = dataList[2]       # The server IP
    PORT = int(dataList[3])  # PORT

    # CONNECTION SETUP
    s = socket(AF_INET, SOCK_STREAM)
    s.bind((HOST, PORT))
    s.listen(1)
    conn, addr = s.accept()
    print str(conn)
    print str(addr)
    # PRINT ADDRESS OF CONNECTED DEVICE
    print 'Connected by', addr

    while True:
        data = conn.recv(1024)
        if (repr(data) == "''"):
            conn, addr = s.accept()
        # PRINT MESSAGES TO THE DISPLAY
        else:
            print repr(data)

        ## VARIABLES:
        client_ip = ''
        client_pass = ''
        client_username = ''
        sw_path = ''
        client_os = ''

        # CONVERTING USER MESSAGE TO A LIST
        temp_user_msg = repr(data)
        temp_user_msg = temp_user_msg[1:-1]
        user_msg = temp_user_msg.split(' ')

        if (user_msg[0] == 'Matlab'):
            function = user_msg[2]
            function = re.findall(".*?\(", function)
            function = function[0][:-1]
            print function

        # ____________________________________________________________
        # CLOSING CONNECTION
        # ____________________________________________________________
        if (repr(data) == "'eof'"):
            s.shutdown(0)
            s.close()

        # ____________________________________________________________
        # CLIENT SCREENSHOTS
        # ____________________________________________________________
        # Takes a screenshot from the Mac.
        if (user_msg[0] == 'snapshot'):
            client_name = user_msg[1]
            for client in session.query(Client).filter(Client.name == client_name):
                client_ip = client.IP
                client_pass = client.password
                client_username = client.username
                client_os = client.OS
            # CASE CLIENT IS MACOS
            if (client_os == 'UNIX'):
                conn.sendall('A snapshot from your computer webcam will be sent to your email soon.\n\n')
                print "DEBUG: TAKING SNAPSHOT"
                call(['sshpass', '-p', client_pass, 'ssh',
                      client_username + '@' + client_ip,
                      '/usr/local/bin/imagesnap',
                      '/Users/' + client_username + '/Desktop/snap.jpeg'])
                print 'DEBUG: COPYING SNAPSHOT FROM CLIENT TO SERVER'
                call(['sshpass', '-p', client_pass, 'scp',
                      client_username + '@' + client_ip + ':/Users/' + client_username + '/Desktop/snap.jpeg',
                      os.getenv('HOME') + '/PowerCalcTempFiles/snap.jpeg'])
                print 'DEBUG: SENDING SNAPSHOT TO EMAIL'
                send_mail('Your snapshot from ' + client_name,
                          '<h3>This picture was taken from your computer.</h3><h4><b>Produced by PowerCalc</b></h4>',
                          'python', 1)
                print 'DELETING FILE FROM CLIENT'
                call(['sshpass', '-p', client_pass, 'ssh',
                      client_username + '@' + client_ip,
                      'rm', '/Users/' + client_username + '/Desktop/snap.jpeg'])
                print 'DEBUG: COMPLETE.\n\n SERVER IS NOW READY.\n\n'
            # CASE CLIENT IS WINDOWS
            elif (client_os == "WINDOWS"):
                conn.sendall('A snapshot from your computer webcam will be sent to your email soon.\n\n')
                print "DEBUG: TAKING SNAPSHOT"
                call(['sshpass', '-p', client_pass, 'ssh',
                      client_username + '@' + client_ip,
                      'CommandCam /filename C:\\PCTemp\\snap.jpeg'])
                time.sleep(5)
                print 'DEBUG: COPYING SNAPSHOT FROM CLIENT TO SERVER'
                call(['sshpass', '-p', client_pass, 'sftp',
                      client_username + '@' + client_ip + ':snap.jpeg',
                      os.getenv('HOME') + '/PowerCalcTempFiles/snap.jpeg'])
                print 'DEBUG: SENDING SNAPSHOT TO EMAIL'
                send_mail('Your snapshot from ' + client_name,
                          '<h3>This picture was taken from your computer.</h3><h4><b>Produced by PowerCalc</b></h4>',
                          'python', 1)
                print 'DELETING FILE FROM CLIENT'
                call(['sshpass', '-p', client_pass, 'ssh',
                      client_username + '@' + client_ip, 'del snap.jpeg'])
                print 'DEBUG: COMPLETE.\n\n SERVER IS NOW READY.\n\n'

        # ____________________________________________________________
        # MATLAB COMMANDS
        # ____________________________________________________________
        if (user_msg[0] == 'Matlab'):
            # GET CLIENT USER, IP AND PASSWORD
            client_name = user_msg[1]
            # GET CLIENT OS
            client_os = database.get_client_os(client_name)
            for client in session.query(Client).filter(Client.name == client_name):
                client_ip = client.IP
                client_pass = client.password
                client_username = client.username
            # CASE THE CLIENT DOES NOT EXIST
            if (client_ip == None or client_pass == None or client_username == None):
                conn.sendall('Client does not exist in the system.')
            # FIND SOFTWARE PATH FOR CLIENT
            for software in session.query(Software).filter(Software.client_name == client_name):
                sw_path = str(software.path)
            print "______________________________"
            print "CLIENT DETAILS:"
            print client_name
            print client_username
            print '************'
            print client_os
            print sw_path
            print "______________________________"
            if (sw_path == None):
                conn.sendall("This software does not exist for this client")
            else:
                # Execute matlab command
                conn.sendall("Processing calculation.\nThe published document will be sent to your email.\n")
                if (client_os == 'UNIX'):
                    print "______________UNIX MATLAB CALL______________"
                    print "DEBUG: EXECUTING FUNCTION:"
                    print ' '.join(map(str, user_msg[2:len(user_msg)]))
                    call(['sshpass', '-p', client_pass, 'ssh', '-X',
                          client_username + '@' + client_ip, sw_path,
                          '-nodesktop', '-r',
                          "\"publish('/Users/" + client_username + "/Documents/MATLAB/" + function + ".m',struct('codeToEvaluate','" + ' '.join(map(str, user_msg[2:len(user_msg)])) + "','showCode',true,'outputDir','/Users/" + client_username + "/Documents/MATLAB','format','pdf')),exit\""])
                    print "DEBUG: COPYING PUBLISHED FILE FROM CLIENT TO SERVER"
                    call(['sshpass', '-p', client_pass, 'scp',
                          client_username + '@' + client_ip + ':/Users/' + client_username + '/Documents/MATLAB/' + function + '.pdf',
                          os.getenv('HOME') + '/PowerCalcTempFiles/temp.pdf'])
                    print "DEBUG: SENDING EMAIL TO USER"
                    send_mail('Your results for ' + function,
                              '<h2>The attached file is your Matlab published file</h2><br><h4>Produced by PowerCalc</h4>',
                              'python')
                    print "DEBUG: REMOVING PUBLISHED FILE FROM SERVER"
                    call(['rm', '-f', os.getenv('HOME') + '/PowerCalcTempFiles/temp.pdf'])
                    print "DEBUG: COMPLETE!\n\nSERVER READY\n\n"
                else:
                    print "______________WINDOWS MATLAB CALL______________"
                    print "DEBUG: CALLING FOR FUNCTION:"
                    print ' '.join(map(str, user_msg[2:len(user_msg)]))
                    call(['sshpass', '-p', client_pass, 'ssh',
                          client_username + '@' + client_ip,
                          'matlab', '-nodesktop', '-r',
                          "\"publish('" + function + ".m', struct('codeToEvaluate', '" + ' '.join(map(str, user_msg[2:len(user_msg)])) + "','outputDir','C:\\PCTemp','format','pdf')), exit;\""])
                    print "CONNECTED TO CLIENT"
                    while not os.path.isfile(os.getenv('HOME') + '/PowerCalcTempFiles/temp.pdf'):
                        call(['sshpass', '-p', client_pass, 'sftp',
                              client_username + '@' + client_ip + ':' + function + '.pdf',
                              os.getenv('HOME') + '/PowerCalcTempFiles/temp.pdf'])
                        time.sleep(30)
                    call(['sshpass', '-p', client_pass, 'ssh',
                          client_username + '@' + client_ip,
                          'cmd /c del /Q C:\\PCTemp\\test.pdf'])
                    print "DEBUG: SENDING EMAIL TO USER"
                    send_mail('Your results for ' + function,
                              '<h2>The attached file is your Matlab published file</h2><br><h4>Produced by PowerCalc</h4>',
                              'python')
                    print "DEBUG: REMOVING PUBLISHED FILE FROM SERVER"
                    call(['rm', os.getenv('HOME') + '/PowerCalcTempFiles/temp.pdf'])
                    print "DEBUG: COMPLETE\n\nSERVER READY\n\n"
def related_to_metric_groups(current_skyline_app, base_name, metric_id):
    """
    Returns a dict of all the metric_groups that a metric is part of.
    """
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    related_to_metric_groups_dict = {}
    related_to_metric_groups_dict['metric'] = base_name
    related_to_metric_groups_dict['metric_id'] = metric_id
    related_to_metric_groups_dict['related_to_metrics'] = {}

    # Initialise engine so the checks below cannot raise NameError
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
        if fail_msg != 'got MySQL engine':
            current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine fail_msg - %s' % str(fail_msg))
        if trace != 'none':
            current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine trace - %s' % str(trace))
    except Exception as err:
        current_logger.error(traceback.format_exc())
        current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine - %s' % str(err))

    if engine:
        try:
            metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine)
            if fail_msg != 'metric_group meta reflected OK':
                current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta fail_msg - %s' % str(fail_msg))
            if trace != 'none':
                current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta trace - %s' % str(trace))
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' % str(err))
        try:
            connection = engine.connect()
            if metric_id:
                stmt = select([metric_group_table]).\
                    where(metric_group_table.c.related_metric_id == metric_id).\
                    order_by(metric_group_table.c.avg_coefficient.desc())
            else:
                stmt = select([metric_group_table])
            results = connection.execute(stmt)
            for row in results:
                group_metric_id = row['metric_id']
                group_base_name = None
                try:
                    group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id)
                except Exception as err:
                    current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from metric_id: %s - %s' % (
                        str(group_metric_id), str(err)))
                if group_base_name:
                    related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row)
            connection.close()
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups dict - %s' % str(err))

    if engine:
        engine_disposal(current_skyline_app, engine)

    # Convert Decimal and datetime values to JSON-friendly types and decode
    # shifted_counts
    for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()):
        for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()):
            if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])):
                related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])
            if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])):
                related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])
            if key == 'shifted_counts':
                try:
                    shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8')
                    shifted_counts = literal_eval(shifted_counts_str)
                except AttributeError:
                    shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key]
                related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts
        # Remap the metric_id and related_metric_id for clarity
        related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id']
        related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] = metric_id
        del related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_metric_id']

    return related_to_metric_groups_dict
def latest_anomalies(current_skyline_app):
    """
    Return the latest anomalies as a list of tuples, each tuple a DB row.
    """
    function_str = 'database_queries.latest_anomalies'
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    anomalies = []
    # Initialise engine so the except block cannot raise NameError
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not get a MySQL engine - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return anomalies

    try:
        anomalies_table, fail_msg, trace = anomalies_table_meta(current_skyline_app, engine)
        current_logger.info(fail_msg)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: %s :: failed to get anomalies_table meta - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return anomalies

    try:
        connection = engine.connect()
        # Replacing the panorama query
        # query = 'select id, metric_id, anomalous_datapoint, anomaly_timestamp, full_duration, created_timestamp, anomaly_end_timestamp from anomalies ORDER BY id DESC LIMIT 10'
        stmt = select([anomalies_table.c.id, anomalies_table.c.metric_id,
                       anomalies_table.c.anomalous_datapoint,
                       anomalies_table.c.anomaly_timestamp,
                       anomalies_table.c.full_duration,
                       anomalies_table.c.created_timestamp,
                       anomalies_table.c.anomaly_end_timestamp]).\
            where(anomalies_table.c.id > 0).\
            order_by(anomalies_table.c.id.desc()).\
            limit(10)
        results = connection.execute(stmt)
        anomalies = []
        if results is not None:
            for row in results:
                if row is not None:
                    anomalies.append(row)
        if not anomalies:
            anomalies = []
        connection.close()
        current_logger.info('%s :: determined %s latest anomalies' % (
            function_str, str(len(anomalies))))
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not determine latest anomalies - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return anomalies

    if engine:
        engine_disposal(current_skyline_app, engine)
    return anomalies
import database
from database import Client as Client
from database import Software as Software

engine = database.get_engine()
session = database.get_session()

ip = None
for client in session.query(Client).filter(Client.IP == '192.168.1.68'):
    ip = client.name
print ip
print
def init_db():
    '''Create the database.'''
    Base.metadata.create_all(get_engine())
    return "Created database."
def metric_id_from_base_name(current_skyline_app, base_name):
    """
    Given a base name, return the metric_id
    """
    metric_id = 0
    function_str = 'database_queries.metric_id_from_base_name'
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not get a MySQL engine - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        return False, fail_msg, trace

    try:
        metrics_table, fail_msg, trace = metrics_table_meta(current_skyline_app, engine)
        current_logger.info(fail_msg)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: %s :: failed to get metrics_table meta for %s - %s' % (
            function_str, base_name, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return False, fail_msg, trace

    try:
        connection = engine.connect()
        stmt = select([metrics_table]).where(metrics_table.c.metric == base_name)
        result = connection.execute(stmt)
        for row in result:
            metric_id = int(row['id'])
            break
        connection.close()
        current_logger.info('%s :: determined db metric id: %s' % (
            function_str, str(metric_id)))
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not determine id of metric from DB for %s - %s' % (
            function_str, base_name, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return False, fail_msg, trace

    if engine:
        engine_disposal(current_skyline_app, engine)
    if not metric_id:
        current_logger.error('error :: %s :: no id for metric in the DB - %s' % (
            function_str, base_name))
    return metric_id
def get_all_db_metric_names(current_skyline_app, with_ids=False):
    """
    Return all metric names from the database as a list. If with_ids is
    True, a dict of metric names to ids is returned as well.
    """
    metric_names = []
    metric_names_with_ids = {}
    function_str = 'database_queries.get_all_db_metric_names'
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not get a MySQL engine - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        return False, fail_msg, trace

    try:
        metrics_table, fail_msg, trace = metrics_table_meta(current_skyline_app, engine)
        current_logger.info(fail_msg)
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: %s :: failed to get metrics_table meta - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return False, fail_msg, trace

    try:
        connection = engine.connect()
        if with_ids:
            stmt = select([metrics_table.c.id, metrics_table.c.metric])
        else:
            stmt = select([metrics_table.c.metric])
        result = connection.execute(stmt)
        for row in result:
            base_name = row['metric']
            metric_names.append(base_name)
            if with_ids:
                metric_names_with_ids[base_name] = row['id']
        connection.close()
        current_logger.info('%s :: determined metric names from the db: %s' % (
            function_str, str(len(metric_names))))
    except Exception as e:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: %s :: could not determine metric names from DB - %s' % (
            function_str, e)
        current_logger.error('%s' % fail_msg)
        if engine:
            engine_disposal(current_skyline_app, engine)
        if current_skyline_app == 'webapp':
            # Raise to webapp
            raise
        return False, fail_msg, trace

    if engine:
        engine_disposal(current_skyline_app, engine)
    if not metric_names:
        current_logger.error('error :: %s :: no metric names returned from the DB' % (
            function_str))
    if with_ids:
        return metric_names, metric_names_with_ids
    return metric_names
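# A minimal usage sketch: with with_ids=True the function returns two values,
# so callers must unpack both. 'webapp' is one of the Skyline app names used
# throughout this section.
metric_names, metric_names_with_ids = get_all_db_metric_names('webapp', with_ids=True)
for name in metric_names[:5]:
    print(name, metric_names_with_ids[name])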
def get_cloudbursts(metric, namespaces, from_timestamp, until_timestamp):
    """
    Create a dict of all the cloudbursts.

    :param metric: the name of the metric
    :param namespaces: the namespaces to match
    :param from_timestamp: the from_timestamp
    :param until_timestamp: the until_timestamp
    :type metric: str
    :type namespaces: list
    :type from_timestamp: int
    :type until_timestamp: int
    :return: dict of cloudbursts
    :rtype: {}

    Returns a dict of cloudbursts
    {
        "cloudbursts": {
            <id>: {
                'metric_id': <int>,
                'metric': <str>,
                'timestamp': <int>,
                'end': <int>,
                'duration': <int>,
                'from_timestamp': <int>,
                'resolution': <int>,
                'full_duration': <int>,
                'anomaly_id': <int>,
                'match_id': <int>,
                'fp_id': <int>,
                'layer_id': <int>,
                'added_at': <int>,
            },
        }
    }
    """
    function_str = 'get_cloudbursts'
    cloudbursts_dict = {}
    engine = None
    metric_ids = []
    use_filter_by_metrics = False
    filter_by_metrics = []
    metric_names_with_ids = {}
    ids_with_metric_names = {}

    logger.info(
        'get_cloudbursts - metric: %s, namespaces: %s, from_timestamp: %s, until_timestamp: %s' % (
            str(metric), str(namespaces), str(from_timestamp),
            str(until_timestamp)))

    if metric != 'all':
        filter_by_metrics = [metric]
        use_filter_by_metrics = True
        if not namespaces:
            namespaces = [metric]

    try:
        redis_conn_decoded = get_redis_conn_decoded(skyline_app)
    except Exception as err:
        logger.error(traceback.format_exc())
        logger.error('error :: %s :: get_redis_conn_decoded failed - %s' % (
            function_str, err))
        raise

    filter_by_metrics = get_filtered_metrics(redis_conn_decoded, namespaces)
    if namespaces:
        use_filter_by_metrics = True

    if metric != 'all':
        try:
            metric_id = int(redis_conn_decoded.hget(
                'aet.metrics_manager.metric_names_with_ids', metric))
            if metric_id:
                metric_names_with_ids[metric] = metric_id
                metric_ids.append(metric_id)
        except Exception as err:
            logger.error(traceback.format_exc())
            logger.error(
                'error :: %s :: failed to get %s from Redis hash aet.metrics_manager.metric_names_with_ids - %s' % (
                    function_str, metric, err))
            raise

    metric_ids, metric_names_with_ids = get_metric_ids(redis_conn_decoded, filter_by_metrics)
    if len(filter_by_metrics) > 1:
        use_filter_by_metrics = True

    for base_name in list(metric_names_with_ids.keys()):
        metric_id = int(metric_names_with_ids[base_name])
        ids_with_metric_names[metric_id] = base_name

    try:
        engine, log_msg, trace = get_engine(skyline_app)
    except Exception as err:
        logger.error(traceback.format_exc())
        logger.error('error :: %s :: failed to get engine - %s' % (
            function_str, err))
        raise

    try:
        cloudburst_table, log_msg, trace = cloudburst_table_meta(skyline_app, engine)
    except Exception as err:
        logger.error(traceback.format_exc())
        logger.error('error :: %s :: failed to get cloudburst_table - %s' % (
            function_str, err))
        raise

    try:
        connection = engine.connect()
        if use_filter_by_metrics:
            stmt = select([cloudburst_table],
                          cloudburst_table.c.metric_id.in_(metric_ids))
            if from_timestamp > 0 and until_timestamp == 0:
                stmt = select([cloudburst_table],
                              cloudburst_table.c.metric_id.in_(metric_ids)).\
                    where(cloudburst_table.c.timestamp >= from_timestamp)
            if from_timestamp == 0 and until_timestamp > 0:
                stmt = select([cloudburst_table],
                              cloudburst_table.c.metric_id.in_(metric_ids)).\
                    where(cloudburst_table.c.timestamp <= until_timestamp)
            if from_timestamp > 0 and until_timestamp > 0:
                stmt = select([cloudburst_table],
                              cloudburst_table.c.metric_id.in_(metric_ids)).\
                    where(cloudburst_table.c.timestamp >= from_timestamp).\
                    where(cloudburst_table.c.timestamp <= until_timestamp)
        else:
            stmt = select([cloudburst_table])
            if from_timestamp > 0 and until_timestamp == 0:
                stmt = select([cloudburst_table]).\
                    where(cloudburst_table.c.timestamp >= from_timestamp)
            if from_timestamp == 0 and until_timestamp > 0:
                stmt = select([cloudburst_table]).\
                    where(cloudburst_table.c.timestamp <= until_timestamp)
            if from_timestamp > 0 and until_timestamp > 0:
                stmt = select([cloudburst_table]).\
                    where(cloudburst_table.c.timestamp >= from_timestamp).\
                    where(cloudburst_table.c.timestamp <= until_timestamp)
        results = connection.execute(stmt)
        for row in results:
            cloudburst_id = row['id']
            metric_id = row['metric_id']
            cloudbursts_dict[cloudburst_id] = dict(row)
            cloudbursts_dict[cloudburst_id]['metric'] = ids_with_metric_names[metric_id]
        connection.close()
    except Exception as err:
        logger.error(traceback.format_exc())
        logger.error('error :: %s :: failed to query cloudburst_table - %s' % (
            function_str, err))
        raise

    # Reorder the dict keys for the page
    cloudbursts_dict_keys = []
    key_ordered_cloudbursts_dict = {}
    if cloudbursts_dict:
        cloudburst_ids = list(cloudbursts_dict.keys())
        first_cloudburst_id = cloudburst_ids[0]
        for key in list(cloudbursts_dict[first_cloudburst_id].keys()):
            cloudbursts_dict_keys.append(key)
        for cloudburst_id in cloudburst_ids:
            key_ordered_cloudbursts_dict[cloudburst_id] = {}
            for key in cloudbursts_dict[cloudburst_id]:
                if key == 'id':
                    key_ordered_cloudbursts_dict[cloudburst_id][key] = cloudbursts_dict[cloudburst_id][key]
                    key_ordered_cloudbursts_dict[cloudburst_id]['metric'] = cloudbursts_dict[cloudburst_id]['metric']
            for key in cloudbursts_dict[cloudburst_id]:
                if key not in ['id', 'metric']:
                    key_ordered_cloudbursts_dict[cloudburst_id][key] = cloudbursts_dict[cloudburst_id][key]
        cloudbursts_dict = key_ordered_cloudbursts_dict
        cloudburst_ids = list(cloudbursts_dict.keys())
        cloudburst_ids.reverse()
        desc_cloudbursts_dict = {}
        for c_id in cloudburst_ids:
            desc_cloudbursts_dict[c_id] = cloudbursts_dict[c_id]
        cloudbursts_dict = desc_cloudbursts_dict

    logger.info('%s :: found %s cloudbursts' % (
        function_str, str(len(list(cloudbursts_dict.keys())))))
    return cloudbursts_dict
def get_anomalies_from_timestamp(current_skyline_app, metric_id, from_timestamp):
    """
    Given a metric_id and timestamp return the anomalies for a metric, or
    for all metrics, from the given timestamp.
    """
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    anomalies = {}
    # Initialise engine so the checks below cannot raise NameError
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
        if fail_msg != 'got MySQL engine':
            current_logger.error('error :: get_anomalies_from_timestamp :: could not get a MySQL engine fail_msg - %s' % str(fail_msg))
        if trace != 'none':
            current_logger.error('error :: get_anomalies_from_timestamp :: could not get a MySQL engine trace - %s' % str(trace))
    except Exception as err:
        current_logger.error(traceback.format_exc())
        current_logger.error('error :: get_anomalies_from_timestamp :: could not get a MySQL engine - %s' % str(err))

    if engine:
        try:
            anomalies_table, fail_msg, trace = anomalies_table_meta(current_skyline_app, engine)
            if fail_msg != 'anomalies_table meta reflected OK':
                current_logger.error('error :: get_anomalies_from_timestamp :: could not get anomalies_table_meta fail_msg - %s' % str(fail_msg))
            if trace != 'none':
                current_logger.error('error :: get_anomalies_from_timestamp :: could not get anomalies_table_meta trace - %s' % str(trace))
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_anomalies_from_timestamp :: anomalies_table_meta - %s' % str(err))
        try:
            connection = engine.connect()
            if metric_id:
                stmt = select([anomalies_table]).\
                    where(anomalies_table.c.metric_id == metric_id).\
                    where(anomalies_table.c.anomaly_timestamp >= from_timestamp).\
                    order_by(anomalies_table.c.id.desc())
            else:
                stmt = select([anomalies_table]).\
                    where(anomalies_table.c.anomaly_timestamp >= from_timestamp).\
                    order_by(anomalies_table.c.id.desc())
            results = connection.execute(stmt)
            for row in results:
                anomaly_id = row['id']
                anomalies[anomaly_id] = dict(row)
            connection.close()
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_anomalies_from_timestamp :: failed to build anomalies dict - %s' % str(err))

    if engine:
        engine_disposal(current_skyline_app, engine)
    return anomalies
def get_anomalies(current_skyline_app, metric_id, params={'latest': False}):
    """
    Given a metric_id, return the anomalies for a metric, or only the latest
    one if latest is passed as True.
    """
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    anomalies = {}
    # Initialise engine so the checks below cannot raise NameError
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
        if fail_msg != 'got MySQL engine':
            current_logger.error('error :: get_anomalies :: could not get a MySQL engine fail_msg - %s' % str(fail_msg))
        if trace != 'none':
            current_logger.error('error :: get_anomalies :: could not get a MySQL engine trace - %s' % str(trace))
    except Exception as err:
        current_logger.error(traceback.format_exc())
        current_logger.error('error :: get_anomalies :: could not get a MySQL engine - %s' % str(err))

    latest = False
    if params:
        try:
            latest = params['latest']
        except KeyError:
            latest = False

    if engine:
        try:
            anomalies_table, fail_msg, trace = anomalies_table_meta(current_skyline_app, engine)
            if fail_msg != 'anomalies_table meta reflected OK':
                current_logger.error('error :: get_anomalies :: could not get anomalies_table_meta fail_msg - %s' % str(fail_msg))
            if trace != 'none':
                current_logger.error('error :: get_anomalies :: could not get anomalies_table_meta trace - %s' % str(trace))
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_anomalies :: anomalies_table_meta - %s' % str(err))
        try:
            connection = engine.connect()
            if latest:
                stmt = select([anomalies_table]).\
                    where(anomalies_table.c.metric_id == metric_id).\
                    order_by(anomalies_table.c.id.desc()).limit(1)
            else:
                stmt = select([anomalies_table]).\
                    where(anomalies_table.c.metric_id == metric_id).\
                    order_by(anomalies_table.c.id.desc())
            results = connection.execute(stmt)
            for row in results:
                anomaly_id = row['id']
                anomalies[anomaly_id] = dict(row)
            connection.close()
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_anomalies :: failed to build anomalies dict - %s' % str(err))

    if engine:
        engine_disposal(current_skyline_app, engine)
    return anomalies
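# A minimal usage sketch: passing latest=True returns a dict with at most one
# entry, the most recent anomaly row for the metric. metric_id 1 and the
# 'webapp' app name are illustrative values.
latest_anomaly = get_anomalies('webapp', 1, params={'latest': True})
for anomaly_id, row in latest_anomaly.items():
    print(anomaly_id, row['anomaly_timestamp'])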
def get_metric_group(current_skyline_app, metric_id=0):
    """
    Returns the metric_group table row as a dict, or all the metric_group
    table rows as a dict if metric_id is not passed or is passed as 0.
    """
    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    metric_group = {}
    if metric_id:
        metric_group[metric_id] = {}

    # Initialise engine so the checks below cannot raise NameError
    engine = None
    try:
        engine, fail_msg, trace = get_engine(current_skyline_app)
        if fail_msg != 'got MySQL engine':
            current_logger.error('error :: get_metric_group :: could not get a MySQL engine fail_msg - %s' % str(fail_msg))
        if trace != 'none':
            current_logger.error('error :: get_metric_group :: could not get a MySQL engine trace - %s' % str(trace))
    except Exception as err:
        current_logger.error(traceback.format_exc())
        current_logger.error('error :: get_metric_group :: could not get a MySQL engine - %s' % str(err))

    if engine:
        try:
            metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine)
            if fail_msg != 'metric_group meta reflected OK':
                current_logger.error('error :: get_metric_group :: could not get metric_group_table_meta fail_msg - %s' % str(fail_msg))
            if trace != 'none':
                current_logger.error('error :: get_metric_group :: could not get metric_group_table_meta trace - %s' % str(trace))
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_metric_group :: metric_group_table_meta - %s' % str(err))
        try:
            connection = engine.connect()
            if metric_id:
                stmt = select([metric_group_table]).\
                    where(metric_group_table.c.metric_id == metric_id).\
                    order_by(metric_group_table.c.avg_coefficient.desc())
            else:
                stmt = select([metric_group_table])
            results = connection.execute(stmt)
            for row in results:
                related_metric_id = row['related_metric_id']
                if metric_id:
                    metric_group[metric_id][related_metric_id] = dict(row)
                else:
                    p_metric_id = row['metric_id']
                    if p_metric_id not in list(metric_group.keys()):
                        metric_group[p_metric_id] = {}
                    metric_group[p_metric_id][related_metric_id] = dict(row)
            connection.close()
        except Exception as err:
            current_logger.error(traceback.format_exc())
            current_logger.error('error :: get_metric_group :: failed to build metric_group dict - %s' % str(err))

    # Convert Decimal and datetime values to JSON-friendly types and decode
    # shifted_counts
    for mi_key in list(metric_group.keys()):
        for rmi_key in list(metric_group[mi_key].keys()):
            for key in list(metric_group[mi_key][rmi_key].keys()):
                if 'decimal.Decimal' in str(type(metric_group[mi_key][rmi_key][key])):
                    metric_group[mi_key][rmi_key][key] = float(metric_group[mi_key][rmi_key][key])
                if 'datetime.datetime' in str(type(metric_group[mi_key][rmi_key][key])):
                    metric_group[mi_key][rmi_key][key] = str(metric_group[mi_key][rmi_key][key])
                if key == 'shifted_counts':
                    try:
                        shifted_counts_str = metric_group[mi_key][rmi_key][key].decode('utf-8')
                        shifted_counts = literal_eval(shifted_counts_str)
                    except AttributeError:
                        shifted_counts = metric_group[mi_key][rmi_key][key]
                    metric_group[mi_key][rmi_key][key] = shifted_counts
            # Remap the metric_id and related_metric_id for clarity
            metric_group[mi_key][rmi_key]['metric_id'] = rmi_key
            metric_group[mi_key][rmi_key]['related_to_metric_id'] = mi_key
            del metric_group[mi_key][rmi_key]['related_metric_id']

    if engine:
        engine_disposal(current_skyline_app, engine)
    return metric_group