def setUp(self):
    """Create a fresh table and DB manager before each test."""
    # Best-effort cleanup of any database file left over from a prior run;
    # failure (e.g. the file does not exist) is deliberately ignored.
    try:
        os.remove(self.FILENAME)
    except Exception:
        pass
    self._table = Table(self.FILENAME)
    self.db_manage = DBManager(self._table)
def aggregator(es):
    """Continuously aggregate completed echo bursts and store RTT statistics.

    Once a second, scans es.sent_echos for bursts whose measurement window
    has expired, computes latency/jitter/loss for each, and persists the
    result via DBManager.store_rtt. Runs forever; intended as a worker loop.

    Args:
        es: echo-state object exposing sent_echos, del_sent_echo and
            delete_targets (project type - see caller).
    """
    db = DBManager()
    while True:
        time.sleep(1)
        # Snapshot the items: entries are deleted (del_sent_echo) during the
        # walk, which would raise RuntimeError on a live dict view in Py3.
        for (endhost, burstid), burst in list(es.sent_echos.items()):
            try:
                # Only process bursts whose collection window has expired.
                if not valid_burst(burst):
                    continue
                rtt = [packet['rtt'] for packet in burst
                       if packet['rtt'] is not None]
                es.del_sent_echo(endhost, burstid)
                # 100% loss: the end host is probably dead, stop probing it.
                if len(rtt) == 0:
                    es.delete_targets(endhost, burst[0]['through'])
                    continue
                latency = numpy.average(rtt)
                jitter = numpy.std(rtt)
                # BUG FIX: the old float((a - b) / c) * 100 truncated the
                # quotient to an int (0 or 100) before converting; divide in
                # float first so partial loss percentages are preserved.
                loss = float(BURST_SIZE - len(rtt)) / BURST_SIZE * 100
                db.store_rtt(endhost, str(burst[0]['through'].id_),
                             latency, jitter, loss,
                             time.gmtime(burst[0]['sent_at']))
            except Exception as e:
                # Keep the worker alive on per-burst failures, but log them.
                print(e)
def create_app(config_filepath="resource/config.cfg"):
    """Application factory: configure the Flask app and wire up its parts.

    Args:
        config_filepath: optional pyfile with overrides for codeMarbleConfig
            (silently skipped when missing).

    Returns:
        The configured Flask app with the codeMarble blueprint registered.
    """
    # Explicit imports instead of the old wildcard import; codeMarbleConfig
    # and thisPath were the only names this function used from the module.
    from codeMarble_config import codeMarbleConfig, thisPath

    # Ensure the static problems directory exists. makedirs (unlike the old
    # os.mkdir) also creates missing intermediate directories.
    dataDir = os.path.join(thisPath, 'codeMarble_Web', 'static', 'problems')
    try:
        os.makedirs(dataDir)
    except Exception as e:
        # Directory already exists (or is unwritable) - log and keep going,
        # matching the original best-effort behavior.
        print(e)

    # Base configuration object, then optional per-deployment overrides.
    app.config.from_object(codeMarbleConfig)
    app.config.from_pyfile(config_filepath, silent=True)

    from codeMarble_py3des import TripleDES
    TripleDES.init()

    from codeMarble_logger import Log
    Log.init()

    # Server-side sessions backed by a simple cache.
    from cache_session import SimpleCacheSessionInterface
    app.session_interface = SimpleCacheSessionInterface()

    from database import DBManager
    DBManager.init(app.config['DB_URL'])
    DBManager.init_db()

    # Importing controller registers its routes; a plain import replaces the
    # old function-scope wildcard import, which is a SyntaxError in Python 3.
    import controller  # noqa: F401
    from codeMarble_blueprint import codeMarble
    app.register_blueprint(codeMarble)

    return app
def parse_station(station, start_date, end_date):
    """Parse one station's planned timetable and upsert it into the database.

    Args:
        station: station name; used for the DB query and written into the
            'station' column of every parsed row.
        start_date, end_date: bounds of the timetable period to fetch.

    Returns:
        True when finished (also when there was nothing to parse).
    """
    with DBManager() as db:
        stations_timetables = db.plan_of_station(station, date1=start_date, date2=end_date)
        parsed = parse_timetable(stations_timetables, db)
    if parsed:
        # Column order comes from the empty_rtd template; hash_id is the key.
        parsed = pd.DataFrame(parsed, columns=empty_rtd.keys())
        parsed = parsed.set_index('hash_id')
        # Remove duplicates. Duplicates may happen if a stop is shifted to the next hour due to delays.
        # It then reappears in the planned timetable of the next hour.
        parsed = parsed.loc[~parsed.index.duplicated(keep='last')]
        parsed['station'] = station
        # These flag columns arrive as '0'/'1' strings; convert to booleans.
        parsed[['ar_dc', 'ar_hi', 'dp_dc', 'dp_hi']] = parsed[['ar_dc', 'ar_hi', 'dp_dc', 'dp_hi']] == '1'
        parsed = add_distance(parsed)
        # Columns that belong to the RtdArrays table and are present here.
        current_array_cols = [
            col for col in RtdArrays.__table__.columns.keys() if col in parsed.columns
        ]
        # There are many columns that contain arrays. These take up a lot of space and aren't
        # used after parsing, so we currently don't store them in the database
        # rtd_arrays_df = parsed.loc[:, current_array_cols]
        # rtd.upsert_arrays(rtd_arrays_df)
        rtd_df = parsed.drop(current_array_cols, axis=1)
        with DBManager() as db:
            db.upsert_rtd(rtd_df)
    return True
def __init__(self, symbol=None, start_date=None, end_date=None, amount=None):
    """Set up a strategy run for one symbol and execute it end to end.

    NOTE(review): this constructor is interactive (ask_input) and prints its
    results (print_roi) - it runs the whole simulation, not just setup.

    Raises:
        DataNotInDataset: when no price data exists at start_date.
    """
    self.symbol = symbol
    self.start_date = start_date
    self.end_date = end_date
    self.investment_amount = amount
    # Check whether data for the start date already exists in the dataset;
    # a close of -1 appears to be the "missing data" sentinel - confirm
    # against DBManager.get_closest_day.
    db = DBManager(self.symbol)
    if db.get_closest_day(start_date).close == -1:
        raise DataNotInDataset
    self.set_name()
    # self.check_data_exists()
    self.ask_input()
    self.trans_db = TransactionsDBManager(self.symbol)
    self.simulate_strategy()
    self.print_roi()
class DirectoryManager:
    """Filesystem-side management of the directory tree described in the DB."""

    # Directory holding the sqlite database files.
    BASE_DIR = "dbs"

    def __init__(self):
        """Open the document database and keep a cursor for queries."""
        self.manage_DB = DBManager()
        self.conn = connect(self.BASE_DIR + '/document.db')
        self.c = self.conn.cursor()

    def get_directory_path(self, directoryid):
        """Return (logical, physical) paths for a directory.

        The logical path is assembled from directory names, the physical one
        from directory ids; both are built by walking up to the root and each
        ends with a trailing slash ('' for an unknown directory).
        """
        info = self.manage_DB.get_info('directory', rowid=directoryid)
        names = []
        ids = []
        # The root directory's parent does not exist, which ends the walk.
        while info:
            names.append(info['name'])
            ids.append(info['id'])
            info = self.manage_DB.get_info('directory', rowid=info['parent_dir'])
        # Reverse so the root comes first, then join with trailing slashes.
        names.reverse()
        ids.reverse()
        path_logical = ''.join('%s/' % part for part in names)
        path_physical = ''.join('%s/' % part for part in ids)
        return path_logical, path_physical

    def create_directory(self, directoryid):
        """Create the on-disk folder for directoryid.

        Returns True when the folder was created, False when it already
        existed.
        """
        _, path_physical = self.get_directory_path(directoryid)
        if os.path.isdir(path_physical):
            # Already present on disk - nothing to do.
            return False
        os.mkdir(path_physical)
        return True

    def get_directory_directories(self, directoryid):
        """List the child directories of directoryid.

        Each returned row is annotated in place with a 'parent' key holding
        the parent directory's name.
        """
        rows = self.manage_DB.get_info('directory', where={
            'parent_dir': directoryid})
        for entry in rows:
            parent_info = self.manage_DB.get_info(
                'directory', rowid=entry['parent_dir'])
            entry['parent'] = parent_info['name']
        return rows
def get_graph_data(symbol, table, dict):
    """Accumulate daily portfolio value for `symbol` into `dict`, in place.

    For each day in the module-level [start_date, end_date) range, adds
    depotkonto + count * closing price to dict[day], initialising missing
    days to 0.

    Args:
        symbol: ticker whose transactions and prices are read.
        table: transactions table identifier passed to TransactionsDBManager.
        dict: day -> running-total mapping, mutated in place. (The name
            shadows the builtin `dict`; kept for caller compatibility.)
    """
    print(symbol, table)
    trans_db = TransactionsDBManager(symbol, table)
    db = DBManager(symbol)
    # start_date / end_date are module-level globals delimiting the range.
    running_date = start_date
    while running_date < end_date:
        # setdefault replaces the old `not in dict.keys()` membership test.
        dict.setdefault(running_date, 0)
        transaction = trans_db.get_latest_transaction(running_date)
        dict[running_date] += (
            transaction.depotkonto
            + transaction.count
            * decimal.Decimal(db.get_closest_raw_day(running_date).close)
        )
        running_date += datetime.timedelta(days=1)
def __init__(self, args):
    """
    Class initialization

    Args:
        args: dict of service settings. Read keys: log_level, log_format,
            default_profile_priority, data_dir, tmp_session_destroy_timeout,
            auto_quit_timeout; state_dir is optional.
    """
    super(FleetCommanderDbusService, self).__init__()

    self.home_dir = os.path.expanduser('~')
    if 'state_dir' in args:
        self.state_dir = args['state_dir']
    else:
        # Set state dir to $HOME/.local/share/fleetcommander
        self.state_dir = os.path.join(self.home_dir, '.local/share/fleetcommander')
    if not os.path.exists(self.state_dir):
        os.makedirs(self.state_dir)

    self.database_path = os.path.join(self.state_dir, 'fleetcommander.db')

    self.args = args

    # Keep the lowercase level name and configure the root logger with the
    # corresponding numeric level from the logging module.
    self.log_level = args['log_level'].lower()
    loglevel = getattr(logging, args['log_level'].upper())
    logging.basicConfig(level=loglevel, format=args['log_format'])

    self.default_profile_priority = args['default_profile_priority']

    # Load FreeIPA connector
    self.ipa = fcfreeipa.FreeIPAConnector()

    self.GOA_PROVIDERS_FILE = os.path.join(args['data_dir'], 'fc-goa-providers.ini')

    # Initialize database
    self.db = DBManager(self.database_path)

    # Initialize change mergers - one per supported settings namespace.
    self.changemergers = {
        'org.gnome.gsettings': mergers.GSettingsChangeMerger(),
        'org.libreoffice.registry': mergers.LibreOfficeChangeMerger(),
        'org.chromium.Policies': mergers.ChromiumChangeMerger(),
        'com.google.chrome.Policies': mergers.ChromiumChangeMerger(),
        'org.mozilla.firefox': mergers.FirefoxChangeMerger(),
        'org.freedesktop.NetworkManager': mergers.NetworkManagerChangeMerger(),
    }

    # Initialize SSH controller
    self.ssh = sshcontroller.SSHController()
    self.known_hosts_file = os.path.join(self.home_dir, '.ssh/known_hosts')

    # Timeout values
    self.tmp_session_destroy_timeout = float(
        args['tmp_session_destroy_timeout'])
    self.auto_quit_timeout = float(args['auto_quit_timeout'])
def __init__(self, database_path, profiles_dir):
    """
    Class initialization

    Args:
        database_path: path of the database handed to DBManager.
        profiles_dir: directory that holds index.json and applies.json.
    """
    self.PROFILES_DIR = profiles_dir
    self.INDEX_FILE = os.path.join(profiles_dir, 'index.json')
    self.APPLIES_FILE = os.path.join(profiles_dir, 'applies.json')
    # Setup database; keep a direct handle to its profiles mapping.
    self.db = DBManager(database_path)
    self.profiles = self.db.profiles
def get_csv_from_feedback(csv_file_location):
    """
    get_csv_from_feedback: dumps the feedback database into a csv file, then
        clears the database tables.

    Args:
        csv_file_location (string): location of the csv file to generate
    Returns:
        void
    """
    db_manager = DBManager(c.feedback_db)
    feedbacks = db_manager.get_all_emotion_feedbacks()
    # 'wb' suggests this runs under Python 2 (csv wants binary mode there);
    # under Python 3 it would need mode 'w' with newline='' - TODO confirm.
    with open(csv_file_location, 'wb') as output:
        writer = csv.writer(output)
        writer.writerow(["headline"] + c.sentiment_lookup)
        for (headline, emotions) in feedbacks:
            writer.writerow([headline] + emotions)
    # BUG FIX: clear the tables only AFTER the CSV has been written; the old
    # code cleared first and lost all feedback if the write failed.
    db_manager.clear_tables()
def __init__(self, args):
    """
    Class initialization

    Args:
        args: dict of service settings. Read keys: state_dir, database_path,
            log_level, log_format, data_dir, webservice_host,
            webservice_port, client_data_url, tmp_session_destroy_timeout;
            profiles_dir is derived from state_dir when absent.
    """
    super(FleetCommanderDbusService, self).__init__()

    # Default the profiles directory to <state_dir>/profiles, creating it
    # on first use.
    if 'profiles_dir' not in args:
        args['profiles_dir'] = os.path.join(args['state_dir'], 'profiles')
        if not os.path.exists(args['profiles_dir']):
            os.mkdir(args['profiles_dir'])

    self.args = args
    self.state_dir = args['state_dir']

    # Keep the lowercase level name and configure the root logger with the
    # corresponding numeric level from the logging module.
    self.log_level = args['log_level'].lower()
    loglevel = getattr(logging, args['log_level'].upper())
    logging.basicConfig(level=loglevel, format=args['log_format'])

    self.profiles = profiles.ProfileManager(args['database_path'],
                                            args['profiles_dir'])
    # Load previous missing profiles data for retrocompatibility
    self.profiles.load_missing_profiles_data()
    self.profiles_dir = args['profiles_dir']

    self.GOA_PROVIDERS_FILE = os.path.join(args['data_dir'], 'fc-goa-providers.ini')

    # Initialize database
    self.db = DBManager(args['database_path'])

    # Initialize collectors - one per supported settings namespace.
    self.collectors_by_name = {
        'org.gnome.gsettings': collectors.GSettingsCollector(self.db),
        'org.libreoffice.registry': collectors.LibreOfficeCollector(self.db),
        'org.freedesktop.NetworkManager': collectors.NetworkManagerCollector(self.db),
    }

    # Initialize SSH controller
    self.ssh = sshcontroller.SSHController()
    self.known_hosts_file = '/root/.ssh/known_hosts'

    self.webservice_host = args['webservice_host']
    self.webservice_port = int(args['webservice_port'])
    self.client_data_url = args['client_data_url']
    self.tmp_session_destroy_timeout = float(
        args['tmp_session_destroy_timeout'])
def create_app(config_filepath = "resource/config.cfg"):
    """Application factory: configure the Flask app and wire up its parts.

    Args:
        config_filepath: optional pyfile with overrides for codeMarbleConfig
            (silently skipped when missing).

    Returns:
        The configured Flask app with the codeMarble blueprint registered.
    """
    # Base configuration object, then optional per-deployment overrides.
    from codeMarble_config import codeMarbleConfig
    app.config.from_object(codeMarbleConfig)
    app.config.from_pyfile(config_filepath, silent=True)

    # Initialize the TripleDES helper - presumably used for session or
    # request payload encryption; confirm against its module.
    from codeMarble_py3des import TripleDES
    TripleDES.init()

    from codeMarble_logger import Log
    Log.init()

    # Server-side sessions backed by a simple cache.
    from cache_session import SimpleCacheSessionInterface
    app.session_interface = SimpleCacheSessionInterface()

    from database import DBManager
    DBManager.init(app.config['DB_URL'])
    DBManager.init_db()

    # Importing controller registers its routes on the blueprint.
    from controller import *
    from codeMarble_blueprint import codeMarble
    app.register_blueprint(codeMarble)

    return app
from flask import *
import os
import util
from database import DBManager

# Flask application and its shared database handle.
app = Flask(__name__)
database = DBManager()

# Server binding; PORT and APP_URL may be supplied via the environment
# (e.g. by a PaaS), otherwise fall back to localhost defaults.
host = '0.0.0.0'
port = int(os.environ['PORT']) if 'PORT' in os.environ else 5000
base_url = os.environ['APP_URL'].rstrip('/') if 'APP_URL' in os.environ else "%s:%d" % (host, port)

# Debug mode defaults to on; OPTIMEET_DEBUG can switch it explicitly.
if 'OPTIMEET_DEBUG' in os.environ:
    debug = os.environ['OPTIMEET_DEBUG'] in [ "True", "true", "T", "t", "1" ]
else:
    debug = True

@app.route('/')
def index():
    # Landing page.
    return render_template('halloween.html')

@app.route('/about')
def about():
    return render_template('about.html')

@app.errorhandler(404)
def page_not_found(error):
    return render_template('page_not_found.html'), 404

@app.route('/create-event')
def create_event():
    # NOTE(review): this handler continues beyond this excerpt.
    if request.args:
        eventname = request.args.get('eventName')
class DBTest(TestCase):
    """Unit tests for the Table / DBManager database layer."""

    # sqlite in-memory database name; os.remove on it always fails and is
    # swallowed in setUp, so each test starts with an empty database anyway.
    FILENAME = ":memory:"

    def setUp(self):
        # Best-effort removal of a leftover file database.
        try:
            os.remove(self.FILENAME)
        except Exception as e:
            pass
        self._table = Table(self.FILENAME)
        self.db_manage = DBManager(self._table)

    def tearDown(self):
        self._table = None
        self.db_manage = None

    def test_add(self):
        # Adding one record grows the table from 0 to 1 rows.
        self.assertEqual(len(self._table.list()), 0)
        record = Record("a", "b", "c")
        self._table.add(record)
        self.assertEqual(len(self._table.list()), 1)

    def test_delete(self):
        record = Record("a", "b", "c")
        self._table.add(record)
        record = self._table.get("a")
        # Deleting an unknown key raises; deleting by pk empties the table.
        with self.assertRaises(ErrorDatabase):
            self._table.delete("test")
        self._table.delete(record.pk)
        self.assertEqual(len(self._table.list()), 0)

    def test_get_non_exists(self):
        # get() returns None for unknown int and string keys alike.
        record = self._table.get(5)
        self.assertIsNone(record)
        record = self._table.get("test")
        self.assertIsNone(record)

    def test_get_password(self):
        record = Record("a", "b", "c")
        self._table.add(record)
        # Lookup by "a" and by "c" must both hit the same stored row;
        # an unknown key yields None.
        result = self.db_manage.get_password("a")
        self.assertIsNotNone(result)
        result2 = self.db_manage.get_password("c")
        self.assertIsNotNone(result2)
        result3 = self.db_manage.get_password("d")
        self.assertIsNone(result3)
        self.assertEqual(result[0], result2[0])
        self.assertEqual(result[1], result2[1])

    def test_wrong_db_file(self):
        # A failing low-level connect() must surface as ErrorDatabase.
        with mock.patch.object(database.db, 'connect', side_effect=IOError("1")):
            with self.assertRaises(ErrorDatabase):
                table = Table(self.FILENAME)

    def test_cant_create_table(self):
        # A failing execute during table creation raises ErrorDatabase.
        table = Table(self.FILENAME)
        with mock.patch.object(table, '_connection') as mock_connection:
            mock_connection.execute = mock.Mock(side_effect=ValueError("1"))
            with self.assertRaises(ErrorDatabase):
                table.create_table_if_not_exists()

    def test_cant_add_record(self):
        # A failing execute during insert raises ErrorDatabase.
        record = Record("a", "b", "c")
        with mock.patch.object(self._table, '_connection') as mock_connection:
            mock_connection.execute = mock.Mock(side_effect=ValueError("1"))
            with self.assertRaises(ErrorDatabase):
                self._table.add(record)

    def test_cant_get(self):
        count_before = len(self._table.list())
        record = Record("test_cant_get",
                        "test_cant_get", "test_cant_get")
        self._table.add(record)
        record = self._table.get("test_cant_get")
        self.assertIsNotNone(record)
        pk = record.pk
        # With a broken cursor, get() degrades to returning None.
        with mock.patch.object(self._table, '_connection') as mock_connection:
            mock_connection.cursor = mock.Mock(side_effect=ValueError("1"))
            record = self._table.get("test_cant_get")
            self.assertIsNone(record)
        self._table.delete(pk)
        self.assertEqual(len(self._table.list()), count_before)

    def test_ui(self):
        # Running without a db path argument exits; an ErrorDatabase during
        # startup also exits.
        with mock.patch.object(sys, 'argv', ["database.py"]):
            with self.assertRaises(SystemExit):
                database.main()
        with mock.patch.object(sys, 'argv', ['database.py', 'test.db']):
            with mock.patch.object(database.Table, '__init__', side_effect=database.ErrorDatabase()):
                with self.assertRaises(SystemExit):
                    database.main()
def __init__(self):
    """Open the shared document database and keep a cursor for queries."""
    self.manage_DB = DBManager()
    self.conn = connect('%s/document.db' % self.BASE_DIR)
    self.c = self.conn.cursor()
# Pop the next header line as the end date (ISO format); the remaining
# lines are the symbols to simulate.
end_date = datetime.datetime.strptime(lines.pop(0), "%Y-%m-%d").date()
symbols = lines
# Split the total investment evenly across all requested symbols.
invest_per_symbol = invest_amount / len(symbols)

# Result accumulators for the different strategies.
tables_avg200 = []
tables_avg200_3 = []
values_BnH = {}
values_avg200 = {}
values_avg200_3 = {}
real_symbols = []

for symbol in symbols:
    db = DBManager(symbol)
    try:
        # NOTE(review): analysis runs only when scraping returns falsy -
        # presumably meaning "no fresh raw data saved"; confirm against
        # data_handler before relying on this.
        if not data_handler.scrape_and_save_raw_prices_to_db(db):
            data_handler.analyze_data_and_save_to_db(db)
        db.close()
    except scrape.SymbolNotFound:
        # Unknown ticker: skip it and continue with the rest.
        print("Symbol not Found - Skipping")
        continue
    try:
        buynhold = BuyAndHold(symbol=symbol, start_date=start_date, end_date=end_date, amount=invest_per_symbol)
        avg200 = AVG200Strat(percents=1.0, symbol=symbol,
def index():
    # NOTE(review): the route decorator for this view is outside this excerpt.
    return 'procedure test'

@app.route('/procedure/<num1>/<num2>')
def procedure_test(num1, num2):
    """Call the `multiply` stored procedure and return its OUT parameter."""
    cursor = DBManager.conn.cursor()
    try:
        # Third proc argument is the OUT slot; the connector exposes it
        # afterwards as the session variable @_multiply_2.
        cursor.callproc('multiply', (num1,num2,0))
        cursor.execute('select @_multiply_2')
        results = list(cursor.fetchone())
        return str(results[0])
    finally:
        cursor.close()

@app.route('/insertbook')
def insertbook():
    """Insert a new book with the next free bookID via the INSERT_BOOK proc."""
    cursor = DBManager.conn.cursor()
    try:
        # bookID appears to be stored as text; multiplying by 1 coerces it
        # so the ORDER BY is numeric - confirm against the schema.
        cursor.execute('select books.bookID from books order by bookID * 1 DESC limit 1')
        last_idx = int(cursor.fetchone()[0])
        last_idx = last_idx+1
        cursor.callproc('INSERT_BOOK', (last_idx, 'newbook', 32000, 'novel', 0))
        # Read back the proc's OUT parameters (positions 0 and 4).
        cursor.execute('select @_INSERT_BOOK_0, @_INSERT_BOOK_4')
        result = cursor.fetchone()
        return "책 insert 결과 : "+str(result)
    finally:
        cursor.close()

if __name__ == '__main__':
    # Establish the shared DB connection before serving requests.
    DBManager.init()
    app.run(port=2323, debug=True)
class IndexManager:
    """Maintains the word index (index_word / index_ref tables) of documents."""

    # Directory holding the sqlite database files.
    BASE_DIR = "dbs"

    def __init__(self):
        """Open the index database and keep a cursor for direct queries."""
        self.manage_DB = DBManager()
        self.conn = connect(self.BASE_DIR + '/index.db')
        self.c = self.conn.cursor()
        return

    def add_index_word(self, root, docid, line, column, branch_word):
        """Record one occurrence of the stemmed word `root` in a document.

        Returns the insertion result for a new reference, or False when an
        identical (word, doc, line, column) reference already exists.
        """
        # Search for the root word in the index word table.
        res = self.manage_DB.get_index_word_info(word=root)

        # The root word does not exist in the table.
        if not res:
            # Insert the root word into the index_word table.
            self.manage_DB.insert_index_word(root)

            # Get the id of the newly inserted root word.
            wordid = self.manage_DB.get_index_word_info(word=root)['id']
        # The root word does exist in the table.
        else:
            # Get the id of the root word.
            wordid = res['id']

        # Search for a reference with the supplied information.
        res = self.manage_DB.get_index_ref_info(
            wordid=wordid, docid=docid, line=line, column=column)

        # A reference with the supplied information does not already exist.
        if not res:
            # Insert a reference with the supplied information and return the
            # the result of the insertion.
            return self.manage_DB.insert_index_ref(
                wordid, docid, line, column, branch_word)
        # A reference with the supplied information already exist, so return
        # False.
        else:
            return False

    def search(self, word):
        """Return all index references for `word` (stemmed), as dictionaries.

        Returns an empty list when the stemmed word is not in the index.
        """
        # Create an instance of the Porter Stemmer.
        PS = PorterStemmer()

        # Get the information for the supplied word.
        res = self.manage_DB.get_index_word_info(
            PS.stem(word, 0, len(word) - 1))

        # The supplied word exist in the index_word table.
        if res:
            # Extract the id for the supplied word.
            wordid = res['id']

            # Return the found entries as a list.
            res = []

            # Query the index_ref table for all the entries whose wordid
            # match the supplied word's id.
            self.c.execute("""select * from index_ref where wordid=?""",
                           (wordid,))

            # Retrieve all the results of the query as a list.
            entries = self.c.fetchall()

            # For ever entry in the list.
            for row in entries:
                # Create a dictionary with the results and add the dictionary
                # to the list.
                res.append({
                    'id': row[0],
                    'word': self.manage_DB.get_index_word_info(row[1])['word'],
                    'docid': row[2],
                    'doc': self.manage_DB.get_document_info(row[2])['name'],
                    'line': row[3],
                    'column': row[4],
                    'branch_word': row[5]})

            # Return the list of all the results.
            return res
        # The supplied word does not exist in the index_word table, so return
        # and empty list.
        else:
            return []
from flask import Flask, render_template
import requests
import dill
from HeadlineSentiment import SentimentAnalyzer
import json
import os
from database import DBManager
import constants as c

app = Flask(__name__)
db_manager = DBManager()

# Index-aligned lookups: position i of sentiment_lookup corresponds to
# position i of headline_lookup.
sentiment_lookup = ['anger', 'disgust', 'fear', 'joy', 'sadness', 'surprise']
headline_lookup = [
    'Angry', 'Disgusting', 'Fearful', 'Happy', 'Sad', 'Surprising'
]

# The key file presumably ends with a newline; [:-1] strips its last
# character - confirm the file format.
key = open("google_news.key").read()[:-1]
api_endpt = "https://newsapi.org/v1/articles?apiKey={}&source=google-news".format(
    key)


def get_news():
    """Fetch Google News headlines and tally their predicted sentiments."""
    response_dict = {}
    r = requests.get(api_endpt, data={"source": "google-news", "apiKey": key})
    r = json.loads(r.text)
    # One counter per entry of sentiment_lookup, in the same order.
    sentiment_tally = [0, 0, 0, 0, 0, 0]
    h_data = []
    for article in r['articles']:
        headline = article['title']
        data = {'headline': headline}
        # NOTE(review): `analyzer` is defined outside this excerpt (likely
        # deserialized via dill) - confirm before relying on it here.
        ranked_list = sorted(analyzer.predict_all(headline),