def get_info(self, file_id):
    """Return all stored info about a file, as a pretty-printed JSON string.

    The result contains the file's id, name, desc and category_name plus a
    "tags" list of tag names. If no file with ``file_id`` exists, the result
    is an object containing only an empty "tags" list.
    """
    with DatabaseManager() as db:
        # NOTE(review): the original query JOINed file_tags here, which
        # silently dropped files that have no tags (fetchone() returned
        # nothing). Tags are fetched separately below, so query files
        # directly instead.
        sql = """
            SELECT files.id, files.name, files.desc, files.category_name
            FROM files
            WHERE files.id=?
            """
        cursor = db.execute(sql, (file_id,))
        row = cursor.fetchone()
        result = dict(row) if row else {}
        # Collect the file's tags as a flat list of tag names.
        result["tags"] = []
        sql = """
            SELECT tag_name
            FROM file_tags
            WHERE file_id=?
            """
        cursor = db.execute(sql, (file_id,))
        for tag_row in cursor.fetchall():
            result["tags"].append(dict(tag_row)["tag_name"])
        return json.dumps(result, indent=2)
def mod_file(self, file_id, name, category, description, tags):
    """Modify a file's metadata, replacing its tag set.

    Any falsy argument (name/category/description/tags) keeps the value
    currently stored in the database. Assumes ``file_id`` exists — a missing
    row would raise when the stored values are read back.
    """
    with DatabaseManager() as db:
        cursor = db.execute("SELECT * FROM files where id=?", (file_id,))
        result = cursor.fetchone()
        # Fall back to the stored values for any field not supplied.
        if not name:
            name = result["name"]
        if not category:
            category = result["category_name"]
        if not description:
            description = result["desc"]
        if not tags:
            cur = db.execute(
                "SELECT tag_name FROM file_tags WHERE file_id=?", (file_id,))
            tags = [t["tag_name"] for t in cur.fetchall()]
        else:
            # Copy so the caller's list is never mutated by the append below.
            tags = list(tags)
        # Every file is implicitly tagged with its category's first letter
        # (same convention as add_file).
        letter_tag = category[0].lower()
        tags.append(letter_tag)
        sql_args = (name, category, description, file_id)
        # NOTE(review): removed a stray db.execute("") that ran an empty SQL
        # statement — dead code at best, an error on some sqlite3 versions.
        # insert or ignore if exists
        db.execute("INSERT OR IGNORE INTO categories(name) values(?)",
                   (category,))
        db.execute("UPDATE files SET name = ?, " +
                   "category_name = ?, desc = ? WHERE id = ?", sql_args)
        # Replace the tag set wholesale.
        db.execute("DELETE FROM file_tags WHERE file_id = ?", (file_id,))
        if tags:
            self.insert_tags(db, tags, file_id)
def getdata(offer, time_frame, fxc):
    """Fetch historical price rows newer than the latest stored date.

    Queries the database for the most recent date held for this
    offer/time-frame, then pulls prices from one minute after that date up
    to (roughly) now, filtering out any row that still carries the
    already-stored timestamp.
    """
    one_minute = datetime.timedelta(minutes=1)
    last_stored = DatabaseManager().return_date(offer, time_frame)
    start = last_stored + one_minute
    # Round "now + 1 minute" down to the whole minute for the upper bound.
    end = (datetime.datetime.now() + one_minute).replace(
        second=0, microsecond=0)
    raw = fxc.get_historical_prices(str(offer), start, end, str(time_frame))
    rows = [bar.__getstate__()[0] for bar in raw]
    return [row for row in rows if last_stored not in row.values()]
def find_file(self, name, category, description, tags):
    """Find files matching any combination of the given filters.

    name/description/category are LIKE patterns (wrapped via
    ``self.surround``); tags become a single REGEXP alternation. Returns a
    pretty-printed JSON array of matching file rows.
    """
    sql = """
    SELECT files.id, files.name, files.desc, files.category_name
    FROM files
    JOIN file_tags ON files.id=file_tags.file_id
    """
    # Build (sql fragment, bound value) pairs in a fixed order, then join.
    filters = []
    if name:
        filters.append(("name LIKE ? ", self.surround(name)))
    if description:
        filters.append(("desc LIKE ? ", self.surround(description)))
    if category:
        filters.append(("category_name LIKE ? ", self.surround(category)))
    if tags:
        pattern = '|'.join(tag.lower() for tag in tags)
        filters.append(("file_tags.tag_name REGEXP ? ", pattern))
    if filters:
        sql += " WHERE " + "AND ".join(clause for clause, _ in filters)
    args = [value for _, value in filters]
    # Collapse the duplicate rows introduced by the file_tags join.
    sql += "GROUP BY files.id "
    with DatabaseManager() as db:
        cursor = db.execute(sql, tuple(args))
        matches = [dict(row) for row in cursor.fetchall()]
    return json.dumps(matches, indent=2)
def _hist_mining(self):
    """Collect events from the history queue and dispatch workers.

    Polls the queue without blocking; each recognised event type is handed
    to a freshly-spawned process. Unrecognised events are dropped.
    """
    while True:
        try:
            event = self.hist_queue.get(False)
        except queue.Empty:
            # Nothing queued — back off briefly before polling again.
            sleep(0.1)
            continue
        if event.type == 'HISTDATA':
            worker = mp.Process(target=DatabaseManager().write_data,
                                args=(event,))
        elif event.type == 'DBREADY':
            worker = mp.Process(
                target=HistoricalCollector().historical_prices,
                args=(self.hist_queue, self.live_queue, event,))
        elif event.type == 'OFFER':
            worker = mp.Process(target=DatabaseManager().database_check,
                                args=(self.hist_queue, event,))
        else:
            continue
        worker.start()
def add_file(self, name, category_name, desc="", tags=None):
    """Add a new file row, creating its category and tags as needed.

    The category's first letter (lower-cased) is always added as an
    implicit tag alongside any tags supplied by the caller.
    """
    with DatabaseManager() as db:
        db.setup()
        # insert or ignore if exists
        db.execute("INSERT OR IGNORE INTO categories(name) values(?)",
                   (category_name,))
        cursor = db.execute(
            "INSERT INTO files(name, category_name, desc) values(?, ?, ?)",
            (name, category_name, desc))
        letter_tag = category_name[0].lower()
        # Build a fresh list instead of appending to the caller's argument —
        # the original mutated the list passed in by the caller.
        all_tags = list(tags) + [letter_tag] if tags else [letter_tag]
        self.insert_tags(db, all_tags, cursor.lastrowid)
def run_main_app():
    """Configure and launch the CherryPy server for the Softeng 701 app."""
    session_timeout = 60 * 1  # timeout is in minutes, * 60 to get hours
    conf = {
        '/': {
            'tools.staticdir.root': os.getcwd(),
            'tools.encode.on': True,
            'tools.encode.encoding': 'utf-8',
            'tools.sessions.on': True,
            'tools.sessions.timeout': session_timeout,
            # The default session backend is in RAM. Other options are 'file',
        },
        '/static': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'static',
        },
    }
    cherrypy.site = {'base_path': os.getcwd()}

    # Initialise the database, optionally seeding it with test data.
    database = DatabaseManager()
    database.init_db(DB_NAME)
    if CREATE_TEST_DATA:
        database.create_test_data()

    # Wire the API onto the main application and mount it at the root.
    main_app = server.MainApp()
    main_app.api = api.base_api.BaseApi(database)
    cherrypy.tree.mount(main_app, "/", conf)
    cherrypy.config.update({
        'server.socket_host': LISTEN_IP,
        'server.socket_port': LISTEN_PORT,
        'engine.autoreload.on': True,
    })

    banner = "========================================"
    print(banner)
    print(" Softeng 701 Server")
    print(banner)
    cherrypy.engine.start()
    cherrypy.engine.block()
def _live_data_session(self, fxc):
    """Dispatch live-feed events from the live queue.

    Polls the queue without blocking; LIVEDATA and GETLIVE events are
    handed to worker processes, while LIVEREADY events register the offer
    in the local ``live_offers`` list (once per offer).
    """
    live_offers = []
    while True:
        try:
            event = self.live_queue.get(False)
        except queue.Empty:
            # Nothing queued — back off briefly before polling again.
            sleep(0.1)
            continue
        kind = event.type
        if kind == 'LIVEDATA':
            mp.Process(target=DatabaseManager().write_data,
                       args=(event, )).start()
        elif kind == 'GETLIVE':
            mp.Process(target=self._get_live,
                       args=(event, live_offers, )).start()
        elif kind == 'LIVEREADY' and event.offer not in live_offers:
            print("[oo] Live Started %s" % event.offer)
            live_offers.append(event.offer)
def rm_file(self, file_id):
    """Remove a file and its associated tag rows.

    The original deleted only from ``files``, leaving orphaned rows in
    ``file_tags``; mod_file maintains ``file_tags`` explicitly, so no
    cascading delete can be assumed here.
    """
    with DatabaseManager() as db:
        db.execute("DELETE FROM file_tags WHERE file_id=?", (file_id,))
        db.execute("DELETE FROM files where id=?", (file_id,))
from flask import Flask, render_template, jsonify, request
from flask_cors import CORS, cross_origin
from db_manager import DatabaseManager
import requests, os, json

# Statics
# Flask application with CORS enabled for all routes.
app = Flask(__name__)
CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
# Single shared database connection manager for the module.
db = DatabaseManager()

# Helper functions
def null_parameter():
    # 400 response used when a required request parameter is missing.
    return jsonify({
        "error": "parameters cannot be null"
    }), 400

def check_country(code):
    # Accepted country codes — TODO confirm this list against the data model.
    return code in ["FI", "FRA", "SPA"]

def invalid_country_code():
    # 400 response for a country code outside the accepted set.
    return jsonify({"error": "country code is not valid"}), 400

def check_screen(type):
    # NOTE(review): parameter name shadows the builtin `type`.
    return type in ["billboard", "standing", "small"]

def invalid_screen_type():
    # 400 response for a screen type outside the accepted set.
    return jsonify({"error": "screen type is not valid"}), 400

# signup a new agency
@app.route("/sign_up", methods=['POST'])
@cross_origin()
def signup():
    # NOTE(review): this handler continues beyond this chunk of the file.
    username = request.args.get("username", default = None)
#settings = None #settings_lock = threading.Lock() settings_wrapper = Wrapper(None) managers_answers = {1 : [], 2 : [], 3 : []} #managers_answers_lock = threading.Lock() managers_answers_wrapper = Wrapper(managers_answers) bot = VK_Bot(TOKEN, ID, VERSION, settings_wrapper, managers_answers_wrapper) l = Loader(sys.argv[1]) l.dump_file_name = sys.argv[2] #settings = l.settings #settings_wrapper.payload = settings settings_wrapper.payload = l.settings logging.debug('Загружены настройки.') dbm = DatabaseManager(settings_wrapper.payload['connection_settings']['host'], settings_wrapper.payload['connection_settings']['username'], settings_wrapper.payload['connection_settings']['passwd'], settings_wrapper.payload['connection_settings']['database']) bot_thread = threading.Thread(target = bot.polling, daemon = True) bot_thread.start() logging.debug('Бот включен.') #dbm, settings_wrapper, managers_answers_wrapper, l prog = Program() main_thread = threading.Thread(target = prog.mainloop, args = (bot, dbm, settings_wrapper, managers_answers_wrapper, l), daemon = True) main_thread.start() input() bot.stop() prog.stop()
def historical_prices(hist_queue, live_queue, event):
    """Contact the database for the latest stored date, then mine
    historical data forward from there to the present.

    For each offer/time-frame in ``event.fxoffer``, collects data in date
    blocks, pushes HistDataEvents onto ``hist_queue``, and signals
    ``live_queue`` with a LiveReadyEvent once an offer is complete.
    """

    def collect_data(fxc, instrument, time_frame, dbdate):
        """Collect one offer/time-frame in date blocks, queueing each block."""
        time_delta = TimeDelta().get_delta(time_frame)
        to_date = None
        fm_date, to_date = DateRange().get_date_block(
            time_delta, dbdate, to_date)
        log(instrument).debug("[>>] Starting Block : %s %s %s %s" %
                              (instrument, str(fm_date), str(to_date),
                               time_frame))
        breakout = 0
        while True:
            breakdate = datetime.datetime.now()  # - datetime.timedelta(minutes = 5)
            # Once the block reaches the present, clamp the upper bound to
            # the current whole minute and make this the final iteration.
            if to_date >= breakdate or fm_date >= breakdate:
                breakout = 1
                now = datetime.datetime.now()
                to_date = now.replace(second=0, microsecond=0)
            try:
                data = fxc.get_historical_prices(
                    str(instrument), fm_date, to_date, str(time_frame))
                data = [d.__getstate__()[0] for d in data]
                # Drop rows that still carry the already-stored timestamp.
                data = [x for x in data if dbdate not in x.values()]
            except (KeyError, IndexError):
                data = []
            if data:
                hist_queue.put(HistDataEvent(data, instrument, time_frame))
                log(instrument).debug("[:)] Data Collected : %s %s %s %s" %
                                      (instrument, str(fm_date),
                                       str(to_date), time_frame))
            else:
                log(instrument).debug("[??] Skipping Block : %s %s %s %s" %
                                      (instrument, str(fm_date),
                                       str(to_date), time_frame))
            # Advance to the next date block either way.
            fm_date, to_date = DateRange().get_date_block(
                time_delta, fm_date, to_date)
            del data
            if breakout == 1:
                break

    fxoffer = event.fxoffer
    # Retry until a ForexConnect session is established.
    while True:
        try:
            fxc = fx.ForexConnectClient(s.FX_USER, s.FX_PASS,
                                        s.FX_ENVR, s.URL)
            if fxc.is_connected():
                break
        except RuntimeError:
            pass
    # .items() replaces the Python-2-only .iteritems(), which raises
    # AttributeError on Python 3; .items() behaves the same on both.
    for offer, time_frames in fxoffer.items():
        for time_frame in time_frames:
            dbdate = DatabaseManager().return_date(offer, time_frame)
            collect_data(fxc, offer, time_frame, dbdate)
            log(offer).debug("[^^] TFrame Complete : %s |%s|" %
                             (offer, time_frame))
        log(offer).debug("[<>] Offer Complete : %s |%s|" %
                         (offer, time_frame))
        print("[^^] Hist complete : %s" % offer)
        live_queue.put(LiveReadyEvent(offer))