def get_episode_list():
    """Bottle handler: entity-link one Friends dialog.

    Expects a JSON body with 'speaker' and 'dialog_id'; returns a JSON string
    containing the CoNLL parse result and the entity link list.
    NOTE(review): name clashes with the other get_episode_list handler in this
    file — presumably they live in different modules/routes; confirm.
    """
    request_str = request.body.read()
    try:
        request_str = request_str.decode('utf-8')
        input_obj = json.loads(request_str)
    except:  # NOTE(review): bare except also hides programming errors; narrow if possible
        return '{"error":"Failed to decode request text"}'
    speaker = input_obj['speaker']
    dialog_id = input_obj['dialog_id']
    # Credentials are redacted in this source; autocommit avoids manual commits.
    DBManager.initialize(host='kbox.kaist.ac.kr', port=3142, user='******',
                         password='******', db='KoreanWordNet2', charset='utf8',
                         autocommit=True)
    # NOTE(review): dialog_id is concatenated into the SQL string — SQL
    # injection risk unless it is guaranteed numeric upstream; confirm.
    result = DBManager.executeQuery(
        'select * from Friends_CONLL_TBL where FND_Dialog_ID=' + str(dialog_id))
    # Prepend the speaker as a pseudo noun token so the linker sees it first.
    result.insert(0, {'POS_text': speaker, 'lemma': speaker, 'POS_tag': 'NN'})
    entityLinker = DummyEntityLinker()
    link_list, parse_result = entityLinker.entitylink(result)
    result_obj = {'parse_result': parse_result, 'link_list': link_list}
    return json.dumps(result_obj)
def get_episode_list():
    """Bottle handler: return all dialog lines of one Friends episode.

    Expects a JSON body with 'episode_id'; responds with the dialog rows as a
    JSON array ordered by FND_Dialog_ID.
    """
    request_str = request.body.read()
    try:
        request_str = request_str.decode('utf-8')
        input_obj = json.loads(request_str)
    except (UnicodeDecodeError, ValueError):
        # Narrowed from a bare `except:` so unrelated programming errors
        # still surface instead of being masked as a decode failure.
        return '{"error":"Failed to decode request text"}'
    episodeid = input_obj['episode_id']
    # Credentials are redacted in this source; autocommit avoids manual commits.
    DBManager.initialize(host='kbox.kaist.ac.kr', port=3142, user='******',
                         password='******', db='KoreanWordNet2', charset='utf8',
                         autocommit=True)
    # NOTE(review): episodeid is spliced into the SQL string — injection risk;
    # switch to a parameterized query if executeQuery supports one.
    result = DBManager.executeQuery(
        'select * from Friends_Dialog_TBL where FND_Episode_ID="' + episodeid + '"')
    for i, item in enumerate(result):
        result[i]['FND_Dialog_ID'] = int(result[i]['FND_Dialog_ID'])
        result[i]['Dialog'] = result[i]['Dialog'].decode('utf-8')
    # BUG FIX: the sorted list used to be assigned to a typo name (`reulst`)
    # and the UNSORTED rows were returned. Assign back to `result` so the
    # response really is ordered by dialog id.
    result = sorted(result, key=itemgetter('FND_Dialog_ID'))
    return json.dumps(result)
def create_oscil(self, period):
    """Build (and optionally persist) an oscillator point series.

    Each output point at index i is input_points[i].value minus the value
    `period` points earlier. The output table name is derived from the input
    name (TREND/CANDLE -> POINT, oscillator tag + period appended).
    Returns the list of oscillator Point objects.
    """
    self.output_table_name = self.output_table_name.replace("TREND", "POINT")
    self.output_table_name = self.output_table_name.replace("CANDLE", "POINT")
    self.output_table_name = self.output_table_name + self.OSCIL + "_" + str(period)
    if self.to_save:
        # if already exists, drop it first and then recreate
        if DBManager.exists_table(self.output_table_name):
            DBManager.drop_table(self.output_table_name)
        pt = PointTable(self.output_table_name)
        pt.save()
    oscil_pt_array = []
    for i, pt in enumerate(self.input_points):
        # BUG FIX: skip the first `period` points so input_points[i - period]
        # is a valid non-negative index. The old bound (i < period - 1) let
        # i == period - 1 through, where i - period == -1 silently wrapped to
        # the LAST point via Python negative indexing.
        if i < period:
            continue
        date = pt.date
        oscil_value = pt.value - self.input_points[i - period].value
        oscil_pt_array.append(Point(self.output_table_name, date, oscil_value))
    if self.to_save:
        self.save_pts(oscil_pt_array)
    return oscil_pt_array
def upload_file():
    """Flask view: accept a file upload on POST; otherwise (or after a
    rejected POST) render the home page with stored happy/unhappy results."""
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submits an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            # Prefix with the current timestamp so repeated uploads of the
            # same file never collide on disk.
            filename_w_prefix = ''.join([str(time.time()), '_', filename.lower()])
            file.save(os.path.join(application.config['UPLOAD_FOLDER'], filename_w_prefix))
            return redirect(url_for('uploaded_file', filename=filename_w_prefix))
    # GET (or a POST that fell through): show previously classified results.
    sql_lite_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db.sqlite')
    dbm = DBManager(sql_lite_file)
    happy = dbm.load_results(1)    # label 1 -> "happy" bucket
    unhappy = dbm.load_results(0)  # label 0 -> "unhappy" bucket
    return render_template('home.html', title='Happiness Recognizer', happy=happy, unhappy=unhappy)
def _on_key_release(self, widget, event):
    """GTK key-release handler: pressing Delete removes the selected row
    from the database and from the backing liststore.

    NOTE(review): 65535 (0xFFFF) is presumably GDK_KEY_Delete — prefer the
    named Gdk constant; confirm against the Gdk version in use.
    """
    if event.keyval == 65535:  # delete key
        sel = widget.get_selection().get_selected()
        # Model column 3 holds the database row id.
        item_id = self.liststore.get(sel[1], 3)[0]
        dbm = DBManager(db_loc)
        dbm.delete(item_id)
        self.liststore.remove(sel[1])
def create_moving_avg_simple(self, num_history_pts):
    """Build (and optionally persist) a simple moving-average point series.

    The output table name is derived from the input name: TREND/CANDLE are
    rewritten to POINT and the average tag + window size are appended.
    Returns the list of averaged Point objects produced by MovingAverage.
    """
    self.output_table_name = self.output_table_name.replace("TREND", "POINT")
    self.output_table_name = self.output_table_name.replace("CANDLE", "POINT")
    self.output_table_name = self.output_table_name + self.SIMPLE_AVG + "_" + str(num_history_pts)
    if self.to_save:
        ##if already exists, drop it first and then recreate
        if DBManager.exists_table(self.output_table_name):
            DBManager.drop_table(self.output_table_name)
        pt = PointTable(self.output_table_name)
        pt.save()
    points = self.input_points
    # The actual averaging is delegated to MovingAverage.
    mv = MovingAverage(self.output_table_name, points)
    pt_array = mv.simple(num_history_pts)
    if self.to_save:
        self.save_pts(pt_array)
    ## possible delete the temporary point table created from candle
    ##if CandleTable.TEMP in self.input_point_table_name:
    ##    DBManager.drop_table(self.input_point_table_name)
    return pt_array
def __init__(self, is_coinbase, encryption_pass=None):
    """Initialize the DCA bot (Coinbase Pro or Robinhood flavor).

    NOTE(review): this source was mangled by credential redaction — the
    `"Encryption password: "******"gmail_password"]` fragment below is NOT
    valid Python. The original email-notification construction was lost and
    must be restored from version control before this file can run.
    """
    if not encryption_pass:
        encryption_pass = getpass.getpass("Encryption password: "******"gmail_password"], default_config.notification_receiver, )
    else:
        # No notification credentials configured.
        self.email_notification = None
    if is_coinbase and default_config.withdraw_btc_threshold:
        # Auto-withdrawal: derive target addresses from the master public key.
        self.address_selector = AddressSelector(
            self.secrets["master_public_key"],
            default_config.withdraw_beginning_address,
        )
    self.db_manager = DBManager()
    self.next_robinhood_buy_datetime = self.calcRobinhoodFirstBuyTime()
    if is_coinbase:
        Logger.info("\n\n\n")
        Logger.info("----------------------")
        Logger.info("----------------------")
        Logger.info("Coinbase DCA started")
        Logger.info("")
        self.coinbase_pro = self.newCoinbaseProClient()
        self.next_buy_datetime = self.calcFirstBuyTime()
def __init__(self, config):
    """Store batching/shuffling hyper-parameters from `config`, open the
    database manager, and precompute the label lookup dicts."""
    self.config = config
    self.batch_size = config.batch_size
    self.num_epochs = config.num_epochs
    self.shuffle_buffer = config.shuffle_buffer
    self.db_manager = DBManager(config)
    # Must run after db_manager is set; builds the label dictionaries.
    self.make_dicts_for_labels()
def search_employee_text(self):
    """This is the menu where the user enters a text string and is
    presented with all employee names containing that string

    Returns the bound present_next_result method so the caller can drive
    the result-paging menu.
    """
    print('FIND EMPLOYEE NAME USING TEXT STRING')
    print("Enter the text string to search on")
    input_text = input("> ")
    text_string = input_text
    # load db
    dbm = DBManager()
    employee_names = dbm.view_names_with_text(text_string)
    # Present a 1-based menu of matching names.
    for i, value in enumerate(employee_names):
        print("{}) {}".format(i + 1, value['name']))
    selected_employee = None
    while selected_employee is None:
        user_input = input("> ")
        # perform input validation
        try:
            user_input = int(user_input) - 1
        except ValueError:
            print("Invalid value, try again")
            continue
        if user_input < 0:
            # Negative indices would silently wrap around the list; reject.
            print("Value out of range. Try again.")
            continue
        try:
            selected_employee = employee_names[user_input]['name']
        except IndexError:
            print("Value out of range. Try again.")
            continue
    # when an employee is selected, show all the entries with that e'ee
    matching_records = dbm.view_everything(employee=selected_employee)
    self.records = matching_records
    self.current_record = 0
    return self.present_next_result
def search_employee(self):
    """This is the menu where the user is given a list of all employees
    who have entries, and can select a particular employee to see all
    their entries

    Returns the bound present_next_result method so the caller can drive
    the result-paging menu.
    """
    print("\nSEARCH BY EMPLOYEE")
    # load the db manager
    dbm = DBManager()
    employee_names = dbm.view_employees()
    # Present a 1-based menu of employee names.
    for i, value in enumerate(employee_names):
        print("{}) {}".format(i + 1, value['name']))
    selected_employee = None
    while selected_employee is None:
        user_input = input("> ")
        # perform input validation
        try:
            user_input = int(user_input) - 1
        except ValueError:
            print("Invalid value, try again")
            continue
        if user_input < 0:
            # Negative indices would silently wrap around the list; reject.
            print("Value out of range. Try again.")
            continue
        try:
            selected_employee = employee_names[user_input]['name']
        except IndexError:
            print("Value out of range. Try again.")
            continue
    # when an employee is selected, show all the entries with that e'ee
    matching_records = dbm.view_everything(employee=selected_employee)
    self.records = matching_records
    self.current_record = 0
    return self.present_next_result
def search_exact_date(self):
    """This is the menu where the user browses dates and entries and
    picks the date from a list

    Returns the bound present_next_result method so the caller can drive
    the result-paging menu.
    """
    print("\nSEARCH EXACT DATE")
    # load the db manager
    dbm = DBManager()
    date_records = dbm.view_dates()
    # Present a 1-based menu of formatted dates.
    for i, value in enumerate(date_records):
        value = self.date_to_string(value['date'])
        print("{}) {}".format(i + 1, value))
    selected_date = None
    while selected_date is None:
        user_input = input("> ")
        # perform input validation
        try:
            user_input = int(user_input) - 1
        except ValueError:
            print("Invalid value, try again")
            continue
        if user_input < 0:
            # Negative indices would silently wrap around the list; reject.
            print("Value out of range. Try again.")
            continue
        try:
            selected_date = date_records[user_input]['date']
        except IndexError:
            print("Value out of range. Try again.")
            continue
    # when a date is selected, show all the entries with that date
    matching_records = dbm.view_entries_for_date(selected_date)
    self.records = matching_records
    self.current_record = 0
    return self.present_next_result
def get(self, id):
    """REST GET handler (Python 2): mark parking spot `id` unavailable,
    i.e. "book" it, then re-read the row to confirm the write.

    Returns a dict with the booking status; on success it also carries the
    booking id. NOTE(review): a GET that mutates state is surprising —
    consider making this a POST.
    """
    resp = {'Booking Successful': False}
    print 'id:', id
    db_mgr = DBManager()
    session = db_mgr.get_session()
    results = []  # NOTE(review): never used — candidate for removal
    try:
        # Flip the spot to unavailable, commit, then read it back.
        qry = session.query(ParkingSpots).filter(
            ParkingSpots.id == int(id)).update({'available': False})
        session.commit()
        qry = session.query(ParkingSpots.available).filter(
            ParkingSpots.id == int(id)).all()
        print qry
        # Booking succeeded only if the row now reads available == False.
        if qry[0][0] == False:
            resp['Booking Successful'] = True
            resp['Your booking ID'] = id
    except Exception as e:
        # Best-effort: any failure leaves the default "not successful" resp.
        print 'Error:', e
    finally:
        db_mgr.close_session(session)
    return resp
def player_check(userId, token):
    """Return True iff `userId` exists and its stored token equals `token`.

    Column 3 of the fetched row is assumed to hold the token — TODO confirm
    against the table schema.
    """
    db = DBManager().get_db_cursor(Database)
    # SECURITY FIX: the table name cannot be parameterized by the driver, but
    # the user-supplied userId is now passed as a bound parameter instead of
    # being interpolated into the SQL string (previously an injection vector).
    db.execute("select * from %s where UserId=%%s" % Table, (userId,))
    player = db.fetchone()
    if player and player[3] == token:
        return True
    return False
def fetch_workshop_papers(db_manager: DBManager, base_url: str, conf_id: str) -> None:
    """Fetch every workshop of a conference and store its papers in the DB.

    Reads the workshop menu page, derives an id and a display name per
    workshop entry, and delegates each one to fetch_papers.
    """
    main_conf_id = conf_id.replace('_workshops', '')
    page_url = base_url + conf_id + '/menu'
    print('Workshop: ' + page_url)
    with urllib.request.urlopen(page_url) as url:
        response = url.read()
    soup = BeautifulSoup(response, 'html.parser')
    entries = soup.find_all('dd')

    # Derive a workshop id from each entry's link target.
    ids = []
    for entry in entries:
        href = entry.find('a').get('href')
        if '.py' in href:
            ids.append(href.replace('.py', '').replace(' ', '_'))
        else:
            ids.append(href.split('/')[-1])

    # Human-readable names, prefixed with the parent conference id.
    names = [conf_id + ' - ' + str(entry.find('a').string) for entry in entries]

    workshop_base_url = base_url + conf_id + '/'
    for workshop_id, workshop_name in zip(ids, names):
        sub_id = workshop_id[workshop_id.find('_') + 1:]
        if sub_id.lower() == '../menu':
            continue  # navigation link, not a real workshop
        fetch_papers(db_manager, workshop_base_url,
                     workshop_base_url + workshop_id,
                     main_conf_id, sub_id, workshop_name)
    db_manager.write_db()
def test_short():
    """Back-test ShortTermStrategy against buy-and-hold on the 300s BTC table
    and print the accumulated profit/balance totals."""
    total_profit = 1
    total_balance = 0
    total_balance_bitsec = 0
    date1 = date_to_timestamp("2016-6-1")
    # range(1): scaffolding for multi-window runs; only one window runs now.
    for i in range(1):
        # Window length: 10 * 30 * 2 half-days per iteration.
        date2 = date1 + 10 * 30 * 2 * HALF_DAY
        tn = CandleFetcher.cut_table(table_names.BTC_300, date1, date2)
        strat = ShortTermStrategy(tn, calc_stats=False)
        ##strat = BollingerStrategy(tn, set_default = True)
        (profit, balance, balance_bitsec) = test_against_hold(strat)
        # Profits compound multiplicatively; balances just accumulate.
        total_profit *= 1 + profit
        total_balance += balance
        total_balance_bitsec += balance_bitsec
        date1 = date2
        ##sc = StatCalculator(tn)
        ##volatility = sc.get_volatility()
        ##volume = sc.get_volume()
        ##print ("Volatility:", volatility)
        ##print ("Volume:", volume)
        # Clean up the temporary cut table created for this window.
        DBManager.drop_table(tn)
    print("total profit:", total_profit)
    print("total balance:", total_balance)
    print("total balance bitsec:", total_balance_bitsec)
def __init__(self, data_dir, coord, symbol_list, year_range, symbol_first, data_win_len, receptive_field, queue_size=500):
    """Set up the data-feeding pipeline: DB access, preprocessing, a padded
    TF FIFO queue, and the (symbol, year) iteration order for reader threads.

    :param coord: TF coordinator used to stop the reader threads.
    :param symbol_first: if True iterate symbols outer / years inner,
        otherwise years outer / symbols inner.
    :param queue_size: capacity of the transaction queue.
    """
    # system initialize
    self.db_manager = DBManager(data_dir)
    self.preprocessor = Preprocessor()
    self.coord = coord
    self.threads = []
    # processing params
    self.data_dir = data_dir
    self.symbol_list = symbol_list
    self.year_range = year_range
    self.symbol_first = symbol_first
    self.data_win_len = data_win_len
    self.receptive_field = receptive_field
    # queue setup: one (None, 1)-shaped float32 tensor enqueued at a time.
    self.trans_placeholder = tf.placeholder(dtype=tf.float32, shape=None)
    self.trans_queue = tf.PaddingFIFOQueue(queue_size, ['float32'], shapes=[(None, 1)])
    self.trans = self.trans_queue.enqueue([self.trans_placeholder])
    # for multithreading:
    self.yield_list = itertools.product(
        self.symbol_list, self.year_range) if self.symbol_first else itertools.product(
        self.year_range, self.symbol_list)
def preprocess(self):
    """Prepare per-table state before a simulation run: zeroed bit counters,
    symbols, candle arrays, adapter balance limits, and (when logging is
    enabled) freshly recreated trade tables."""
    ##initialize all bits to 0, get symbols
    for tn in self.table_name_array:
        self.bits_array.append(0)
        self.bits_end_array.append(0)
        self.symbol_array.append(CandleTable.get_target_currency(tn))
        self.total_bits_bought_array.append(0)
    ##get candle arrays
    self.candles_array = []
    for tn in self.table_name_array:
        candles = CandleTable.get_candle_array(tn)
        self.candles_array.append(candles)
    # Propagate the balance cap to every strategy that exposes an adapter.
    for s in self.strategy_array:
        if hasattr(s, "adapter") and self.balance_limit is not None:
            s.adapter.set_limit(self.balance_limit)
    ##create trade tables
    if self.to_log:
        for i, tn in enumerate(self.table_name_array):
            trade_table_name = TradeTable.calc_name(
                tn, self.strategy_array[i].get_name())
            trade_table = TradeTable(trade_table_name)
            # Recreate from scratch so old runs don't pollute the trade log.
            if DBManager.exists_table(trade_table_name):
                DBManager.drop_table(trade_table_name)
            trade_table.save()
            self.trade_table_name_array.append(trade_table_name)
            self.strategy_array[i].trade_table_name = trade_table_name
            self.trades_array.append([])
def create_stddev(self, num_history_pts):
    """Build (and optionally persist) a rolling standard-deviation series.

    Each output point at index i is the stddev of the num_history_pts input
    points ending at i (inclusive). Returns the list of stddev Point objects.
    """
    self.output_table_name = self.output_table_name.replace("TREND", "POINT")
    self.output_table_name = self.output_table_name.replace("CANDLE", "POINT")
    self.output_table_name = self.output_table_name + self.STDDEV + "_" + str(num_history_pts)
    if self.to_save:
        ##if already exists, drop it first and then recreate
        if DBManager.exists_table(self.output_table_name):
            DBManager.drop_table(self.output_table_name)
        pt = PointTable(self.output_table_name)
        pt.save()
    orig_pt_array = self.input_points
    stddev_pt_array = []
    for i, pt in enumerate(orig_pt_array):
        if i < num_history_pts:
            ##don't calculate stddev for first points since there is not enough history available
            # NOTE(review): at i == num_history_pts - 1 a full window of
            # num_history_pts points already exists; this skips one computable
            # point — confirm whether that is intentional before changing.
            pass
        else:
            date = pt.date
            # Window: the num_history_pts points ending at (and including) i.
            stddev = StandardDeviation.simple(orig_pt_array[i-num_history_pts + 1: i+ 1])
            stddev_pt = Point(self.output_table_name, date, stddev)
            stddev_pt_array.append(stddev_pt)
    if self.to_save:
        self.save_pts(stddev_pt_array)
    ##if CandleTable.TEMP in self.input_point_table_name:
    ##    DBManager.drop_table(self.input_point_table_name)
    return stddev_pt_array
def btn_dbmanager_react(self):
    """Slot for Signal"clicked()" of "DB manager" button """
    # Resolve the server currently highlighted in the servers tree view.
    selectedServer = self.ui.tlw_servers.model().data(self.ui.tlw_servers.currentIndex(), SERVER_INFO_ROLE)
    dbmanager_win = DBManager(self.server_view_list, self, self.MAIN_DB, selectedServer)
    dbmanager_win.show()
    # Keep a reference so the window isn't garbage-collected while open.
    self.dbmanager_windows.append(dbmanager_win)
def get_new_signals(self, tn_index):
    """Run the strategy for the table at `tn_index` over recent candles and
    return the freshly generated Sig objects (currently one per call: only
    the most recent candle is decided on).
    """
    tn = self.trader_tables[tn_index]
    signal_tn = self.signal_table_names[tn_index]
    cur_date = int(time())
    # NOTE(review): last_date is computed but no longer used since the
    # per-candle loop below was commented out — confirm it can be dropped.
    last_date = SignalTable.get_last_date(signal_tn)
    period = float(CandleTable.get_period(tn))
    ##cut the candle table to get one of a more manageable size
    # Window = 5x the strategy's required history, in candle periods.
    cut_table_name = CandleFetcher.cut_table(tn, int(cur_date - 5*ShortTermStrategy.DATA_PAST*period))
    candles = CandleTable.get_candle_array(cut_table_name)
    ##new_candle_index = self.find_new_candle_index(candles, last_date)
    new_signals = []
    ##run a strategy on the candles and store the resulting operations returned
    strat = self.strat_array[tn_index]
    sym = SignalTable.get_sym(self.signal_table_names[tn_index])
    strat.update_state(candles)
    ##i = new_candle_index
    ##while i < len(candles):
    # Decide only on the last (most recent) candle.
    i = len(candles) - 1
    o = strat.decide(i, self.sym_infos[sym].total_balance, self.sym_infos["USDT"].total_balance)
    sig = Sig(signal_tn, candles[i].date, SignalTable.get_sym(signal_tn), o.amount, candles[i].close, o.op)
    new_signals.append(sig)
    ##i += 1
    ##delete created table when done
    DBManager.drop_table(cut_table_name)
    return new_signals
def enroll_student():
    """Flask handler: enroll a student in a course and email them the link.

    Expects JSON with courseid, email, firstname, lastname; looks up the
    course row, sends the enrollment email, and returns a JSON 'Success'.
    """
    print("request.data = ", request.data)
    data = request.json
    print("data = ", data)
    course_id = data['courseid']
    p_email = data['email']
    p_fname = data['firstname']
    p_lname = data['lastname']
    # SECURITY FIX: course_id comes straight from the request body and was
    # spliced into the SQL string below. Forcing it through int() accepts
    # only plain integers (anything else raises ValueError), closing the
    # injection hole while leaving valid ids ('1'..'4') byte-identical.
    course_id = str(int(course_id))
    dbm = DBManager()
    qry = 'select Name, Image_File from Courses where Id = %s and Status = "1"' % course_id
    course_details = dbm.execute_query(qry)
    course_name = course_details[0][0]
    course_img_file = course_details[0][1]
    # Static shortlink per course id; course_id is also the lookup key here.
    course_url = {
        '1': 'http://tinyurl.com/yc5omxjl',
        '2': 'http://tinyurl.com/ya8st6uo',
        '3': 'http://tinyurl.com/y92rul68',
        '4': 'http://tinyurl.com/ya6cvm3j',
    }
    send_email_with_link(p_fname, p_lname, p_email, course_id, course_name,
                         course_url[course_id], course_img_file)
    response = app.response_class(response=json.dumps('Success'),
                                  status=200, mimetype='application/json')
    return response
def main() -> None:
    """Scrape the ICML 2020 virtual site into the paper database."""
    manager = DBManager()
    manager_args = ('https://icml.cc/virtual/2020', 'html/ICML2020.html',
                    'ICML2020', 'Main', 'ICML2020')
    fetch_papers(manager, *manager_args)
    manager.write_db()
def create_cut_table(self):
    """Create a trend table covering exactly the candle table's date range.

    For every candle date, the most recent hit count from the source trend
    table is copied into the new table. Returns the new TrendTable.
    """
    self.first_date = CandleTable.get_first_date(self.candle_table_name)
    self.last_date = CandleTable.get_last_date(self.candle_table_name)
    self.cut_trend_table_name = self.get_trend_table_name()
    # Recreate from scratch so stale rows never leak into the cut.
    if DBManager.exists_table(self.cut_trend_table_name):
        DBManager.drop_table(self.cut_trend_table_name)
    tt = TrendTable(self.cut_trend_table_name)
    tt.save()
    # BUG FIX: `dbm` was referenced below without ever being defined in this
    # scope (NameError on the first loop iteration). Obtain the shared
    # manager the same way sibling code in this module does.
    dbm = DBManager.get_instance()
    candles = CandleTable.get_candle_array(self.candle_table_name)
    for c in candles:
        date = c.date
        hits = TrendTable.get_most_recent_hits(self.trend_table_name, date)
        trend = Trend(dbm, self.cut_trend_table_name, date, hits)
        trend.save()
    return tt
def create_bb(self, type):
    """Create and persist one Bollinger-band point table (low or high side).

    :param type: self.LOW or self.HIGH (parameter name kept for callers,
        though it shadows the builtin).
    :return: the name of the created band table.
    :raises ValueError: if `type` is neither self.LOW nor self.HIGH.
    """
    bb_table_name = self.middle_avg_table_name + "_" + type
    # if already exists, drop it first and then recreate
    if DBManager.exists_table(bb_table_name):
        DBManager.drop_table(bb_table_name)
    bb_pt_table = PointTable(bb_table_name)
    bb_pt_table.save()
    for i, avg in enumerate(self.avg_pts):
        # Skip the warm-up region where no stddev point exists yet.
        if i < self.avg_period:
            continue
        date = avg.date
        offset = self.bb_factor * self.stddev_pts[i - self.avg_period].value
        if type == self.LOW:
            value = avg.value - offset
        elif type == self.HIGH:
            value = avg.value + offset
        else:
            # ROBUSTNESS FIX: an unknown band type used to fall through and
            # raise an opaque NameError on `value`; fail fast instead.
            raise ValueError("unknown Bollinger band type: %r" % (type,))
        new_pt = Point(bb_table_name, date, value)
        new_pt.save()
    dbm = DBManager.get_instance()
    dbm.save_and_close()
    return bb_table_name
def create_workload():
    """
    An interface to create a new workload and return a directory for the
    client to transfer data to and the workload id as is in the database.
    (Python 2 Flask handler.)
    """
    if request.method == "POST":
        username = request.form["username"]
        # AWS credentials are optional; default to empty strings.
        secret_key = ""
        access_key = ""
        key_pair = ""
        print "Request from %s" % username
        if "access_key" in request.form:
            access_key = request.form["access_key"]
            secret_key = request.form["secret_key"]
            key_pair = request.form["key_pair"]
        # Get the user id from the database
        res = {}
        try:
            db_manager = DBManager()
            user_id = db_manager.get_user_id(username, access_key, secret_key, key_pair)
            # Now create a new workload
            workload_id = db_manager.create_new_workload(user_id)
            workload_dir = create_workload_dir(workload_id)
            # Update the database to reflect the working dir being there
            db_manager.update_workload_dir(workload_id, workload_dir)
            res = {"workload_id": workload_id, "workload_dir": workload_dir}
        except Exception, e:
            # Log, then re-raise so the framework returns an error response.
            print "Error: %s" % e
            raise e
        return jsonify(res)
def cut_table(orig_table_name, date_start, date_end=9999999999):
    """Copy the candles of `orig_table_name` lying within [date_start,
    date_end] into a freshly (re)created table; return the new table's name.

    The default date_end (9999999999) effectively means "until the end".
    """
    ##print "Cutting table: ", orig_table_name, " candle data between: ", timestamp_to_date(date_start), " ---- ", timestamp_to_date(date_end)
    ##create new table
    curr_ref = CandleTable.get_ref_currency(orig_table_name)
    curr_target = CandleTable.get_target_currency(orig_table_name)
    period = CandleTable.get_period(orig_table_name)
    new_table = CandleTable(curr_ref, curr_target, date_start, date_end, period)
    new_table_name = new_table.table_name
    # Recreate from scratch so stale rows never leak into the cut.
    if DBManager.exists_table(new_table_name):
        DBManager.drop_table(new_table_name)
    new_table.save()
    ##populate new table with candles from orig_table that lie between the 2 dates
    candle_array = CandleTable.get_candle_array_by_date(
        orig_table_name, date_start, date_end)
    for c in candle_array:
        new_c = Candle(new_table_name, c.date, c.high, c.low, c.open, c.close, c.volume, c.quoteVolume, c.weightedAverage)
        new_c.save()
    # NOTE(review): the returned manager is unused — kept in case
    # get_instance() has a connection side effect; confirm before removing.
    dbm = DBManager.get_instance()
    return new_table_name
def main() -> None:
    """Scrape the BMVC 2018 proceedings index into the paper database."""
    manager = DBManager()
    root = 'http://www.bmva.org/bmvc/2018/'
    fetch_papers(manager, root, root + 'index.html', 'BMVC2018', 'Main', 'BMVC2018')
    manager.write_db()
def test_strategy(strat_string, start_date, ticker, resolution, num_days):
    """Back-test the named strategy against buy-and-hold over one window.

    :param strat_string: name of a strategy class in scope (still eval()'d —
        see the security note below).
    :param start_date: window start, e.g. "2016-6-1".
    :param ticker, resolution: select the ``table_names.<TICKER>_<RES>`` table.
    :param num_days: length of the test window in days.
    Prints the accumulated profit/balance totals.
    """
    total_profit = 1
    total_balance = 0
    total_balance_bitsec = 0
    date1 = date_to_timestamp(start_date)
    # range(1): scaffolding for multi-window runs; only one window runs now.
    for i in range(1):
        date2 = date1 + num_days * 2 * HALF_DAY
        # IMPROVEMENT: look the table name up by attribute instead of
        # eval()-ing a code string — identical result for valid names, no
        # arbitrary-code-execution hole via ticker/resolution.
        tn = CandleFetcher.cut_table(
            getattr(table_names, ticker + '_' + resolution), date1, date2)
        # SECURITY NOTE: eval() of strat_string still executes arbitrary code;
        # only call this with trusted, hard-coded strategy names (a dispatch
        # dict of strategy classes would remove the risk entirely).
        strat = eval(strat_string)(tn, calc_stats=False)
        (profit, balance, balance_bitsec) = test_against_hold(strat)
        total_profit *= 1 + profit
        total_balance += balance
        total_balance_bitsec += balance_bitsec
        date1 = date2
        # Clean up the temporary cut table created for this window.
        DBManager.drop_table(tn)
    print("total profit:", total_profit)
    print("total balance:", total_balance)
    print("total balance bitsec:", total_balance_bitsec)
def main() -> None:
    """Scrape every configured PMLR volume into the paper database."""
    manager = DBManager()
    root = 'http://proceedings.mlr.press/'
    for vol, name in zip(PMLR_VOLUMES, CONF_NAMES):
        fetch_papers(manager, root, root + 'v%d/' % vol, name, 'Main', name)
    manager.write_db()
def main() -> None:
    """Scrape every configured NeurIPS proceedings listing into the DB."""
    manager = DBManager()
    root = 'https://papers.nips.cc'
    for link, name in zip(CONF_LINKS, CONF_NAMES):
        fetch_papers(manager, root, '/'.join([root, link]), name, 'Main', name)
    manager.write_db()
def __init__(self):
    """Connect to the Oracle metadata DB and Jira, then register the
    minute-interval job.

    NOTE(review): schedule.every(1).minute.do() only registers __job;
    something must call schedule.run_pending() in a loop for it to fire —
    confirm the runner exists elsewhere.
    """
    meta_db = config['ora_dcm']
    # Oracle EZCONNECT-style string: user/pwd@host:port/sid
    connection_string = "{0}/{1}@{2}:{3}/{4}".format(
        meta_db['user'], meta_db['pwd'], meta_db['host'], meta_db['port'], meta_db['sid'])
    self.db_manager = DBManager(connection_string)
    self.jira = JiraManager(config['jira'])
    schedule.every(1).minute.do(self.__job)
def main() -> None:
    """Crawl every configured ECVA conference listing into the paper DB.

    CONSISTENCY: added the `-> None` return annotation the sibling main()
    entry points in this codebase already carry, and hoisted the
    loop-invariant `list_url` out of the loop (the ECVA site serves one
    combined listing page for all conferences).
    """
    base_url = 'https://www.ecva.net/'
    db_manager = DBManager()
    list_url = base_url + 'papers.php'
    for conf_id in CONFERENCES:
        fetch_papers(db_manager, base_url, list_url, conf_id, 'Main', conf_id)
    db_manager.write_db()
def _on_refresh_clicked(self, arg):
    """GTK handler: reload every stored row from the DB into the liststore.

    NOTE(review): the model columns [i[1], i[2], repr(i[4]), i[0]] are
    inferred from tuple indices (last one is the DB row id) — confirm the
    column meanings against the table schema.
    """
    dbm = DBManager(db_loc)
    self.liststore.clear()
    for i in dbm.retrieve_all():
        self.liststore.append([i[1], i[2], repr(i[4]), i[0]])
    self.show_all()
class DBManagerTests(TestBase):
    """Sanity tests for DBManager insertion and retrieval."""

    def setUp(self):
        self.db_manager = DBManager('test.db')

    def test_insertion_sanity(self):
        # Insert two records, then verify both come back in order.
        urls = ['http://example.com/', 'http://example.com/a.html']
        self.db_manager.update(
            [{'url': u, 'crawl_time': datetime.now()} for u in urls])
        crawled = self.db_manager.get_crawled_urls()
        self.assertEqual([record.url for record in crawled], urls)
def workload_status(val): """ Return the status of each of the profiles that are being generated for the workload. """ db_manager = DBManager() workload_status = "Something went wrong." try: workload_status = db_manager.get_workload_status(val) except Exception, e: print e raise e
def _on_submit_clicked(self, arg):
    """GTK handler: collect the text of every template textbox, fill the
    template with it, persist the composed message, and close the window."""
    attr = {}
    for box in self.txtbox_list:
        buff = box.get_buffer()
        start = buff.get_start_iter()
        end = buff.get_end_iter()
        # Map each textbox's name to its full buffer contents.
        attr[box.__name__] = buff.get_text(start, end)
    text = self.te.replace(**attr)
    dbm = DBManager(db_loc)
    # Persist: recipient, subject, body, unix timestamp, template name.
    dbm.insert(self.email.get_text(), self.subject.get_text(), text,
               int(time.time()), self.template_entry.get_text())
    self.destroy()
def run(self):
    """Show the DB Manager dialog, creating it lazily so at most one
    instance ever exists, then raise and focus it."""
    # keep opened only one instance
    # IDIOM FIX: identity check against the None singleton (was `== None`).
    if self.dlg is None:
        from db_manager import DBManager
        self.dlg = DBManager(self.iface, self.iface.mainWindow())
        # Reset self.dlg when the dialog is destroyed so it can be recreated.
        QObject.connect(self.dlg, SIGNAL("destroyed(QObject *)"), self.onDestroyed)
    self.dlg.show()
    self.dlg.raise_()
    self.dlg.activateWindow()
class Crawler(object):
    """
    Crawler - a simple web-crawler that follows internal and external links.
    It will save all its results in a DB, and will go on till it gets to a
    defined max depth.
    """

    def __init__(self, db_file='crawler.db'):
        """
        :param db_file: The database file name to be saved
        """
        self._url_scanner = UrlScanner()
        self._db_manager = DBManager(db_file)
        self._internal_urls_to_scan = []   # same-domain frontier
        self._external_urls_to_scan = []   # off-domain frontier
        self._max_page_depth = None
        self._max_external_sites_page_depth = None
        self._domain = None

    def crawl(self, url, max_page_depth=5, max_external_sites_page_depth=4, request_rate_limit=4):
        """
        Will crawl a given url up to max_page_depth and
        max_external_sites_page_depth on a max rate of request_rate_limit.
        :param url: The to-be crawled url
        :param max_page_depth: Max internal (same-domain) depth
        :param max_external_sites_page_depth: Max external (different-domain) depth
        :param request_rate_limit: Up to n requests at once
        :return: List of Url objects (See schemas/url.py)
        """
        self._url_scanner.set_request_limit(request_rate_limit)
        self._max_page_depth = max_page_depth
        self._max_external_sites_page_depth = max_external_sites_page_depth
        self._domain = get_domain(url)
        self._internal_urls_to_scan.append(url)
        # Internal pages first; external links gathered along the way are
        # crawled afterwards.
        self._crawl_internal_urls()
        self._crawl_external_urls()
        return self._get_crawled_urls()

    def _get_crawled_urls(self):
        # All results live in the DB, not in memory.
        return self._db_manager.get_crawled_urls()

    def _crawl_internal_urls(self):
        # One depth level per pass over the same-domain frontier.
        while self._internal_urls_to_scan and self._max_page_depth:
            child_urls = self._url_scanner.scan(self._internal_urls_to_scan)
            # Drop anything already recorded in the DB (the visited set).
            child_urls = list(set(child_urls) - set([x.url for x in self._db_manager.get_crawled_urls()]))
            self._internal_urls_to_scan = get_same_domain_urls(self._domain, child_urls)
            # Off-domain children are queued for the external crawl phase.
            self._external_urls_to_scan.extend(get_external_urls(self._domain, child_urls))
            url_objects = [{'url': url, 'crawl_time': datetime.now()} for url in self._internal_urls_to_scan]
            self._db_manager.update(url_objects)
            self._max_page_depth -= 1

    def _crawl_external_urls(self):
        # Same scheme as the internal crawl, but over off-domain links only.
        while self._external_urls_to_scan and self._max_external_sites_page_depth:
            self._external_urls_to_scan = self._url_scanner.scan(self._external_urls_to_scan)
            self._external_urls_to_scan = list(
                set(self._external_urls_to_scan) - set([x.url for x in self._db_manager.get_crawled_urls()]))
            url_objects = [{'url': url, 'crawl_time': datetime.now()} for url in self._external_urls_to_scan]
            self._db_manager.update(url_objects)
            self._max_external_sites_page_depth -= 1
class DBManagerPlugin:
    """QGIS plugin wrapper for the DB Manager dialog: wires the action into
    the Database menu/toolbar (with fallbacks for older QGIS APIs) and keeps
    at most one dialog instance alive."""

    def __init__(self, iface):
        self.iface = iface
        self.dlg = None  # lazily created DB Manager dialog

    def initGui(self):
        self.action = QAction(QIcon(":/db_manager/icon"), QApplication.translate("DBManagerPlugin", "DB Manager"), self.iface.mainWindow())
        self.action.setObjectName("dbManager")
        QObject.connect(self.action, SIGNAL("triggered()"), self.run)
        # Add toolbar button and menu item
        # hasattr() guards: the Database-specific APIs only exist in newer
        # QGIS versions; fall back to the generic plugin slots otherwise.
        if hasattr(self.iface, 'addDatabaseToolBarIcon'):
            self.iface.addDatabaseToolBarIcon(self.action)
        else:
            self.iface.addToolBarIcon(self.action)
        if hasattr(self.iface, 'addPluginToDatabaseMenu'):
            self.iface.addPluginToDatabaseMenu(QApplication.translate("DBManagerPlugin", "DB Manager"), self.action)
        else:
            self.iface.addPluginToMenu(QApplication.translate("DBManagerPlugin", "DB Manager"), self.action)

    def unload(self):
        # Remove the plugin menu item and icon (mirrors initGui's fallbacks).
        if hasattr(self.iface, 'removePluginDatabaseMenu'):
            self.iface.removePluginDatabaseMenu(QApplication.translate("DBManagerPlugin", "DB Manager"), self.action)
        else:
            self.iface.removePluginMenu(QApplication.translate("DBManagerPlugin", "DB Manager"), self.action)
        if hasattr(self.iface, 'removeDatabaseToolBarIcon'):
            self.iface.removeDatabaseToolBarIcon(self.action)
        else:
            self.iface.removeToolBarIcon(self.action)
        if self.dlg is not None:
            self.dlg.close()

    def run(self):
        # keep opened only one instance
        if self.dlg is None:
            from db_manager import DBManager
            self.dlg = DBManager(self.iface)
            # Reset self.dlg when the dialog is destroyed (see onDestroyed).
            QObject.connect(self.dlg, SIGNAL("destroyed(QObject *)"), self.onDestroyed)
        self.dlg.show()
        self.dlg.raise_()
        # Clear the minimized bit so the window actually comes to the front.
        self.dlg.setWindowState(self.dlg.windowState() & ~Qt.WindowMinimized)
        self.dlg.activateWindow()

    def onDestroyed(self, obj):
        self.dlg = None
def request_profiles():
    """
    Create a profile for the workload as it is run over each of the
    specified instance types. (Python 2 Flask handler.)
    """
    config_file = "profiler.ini"
    if request.method == "POST":
        description = request.form["description"]
        workload = request.form["workload"]
        working_dir = request.form["working_dir"]
        job_desc = json.loads(description)
        # Create an entry in the db for each job
        db_jobs = {}       # instance_type -> job id
        inst_params = {}   # instance_type -> override params
        db_manager = DBManager()
        db_manager.update_workload_dir(workload, working_dir, None)
        try:
            for inst in job_desc["instance_types"]:
                instance_type = inst["type"]
                job_id = db_manager.insert_job(workload)
                db_jobs.update({instance_type: job_id})
                inst_params.update({instance_type: inst["override"]})
        except Exception, e:
            print "Error with job creation %s" % e
        # While we are at it, set the executable to executable
        # NOTE(review): chmod 777 via `sudo su root -c` on a path built from
        # request input — command-injection risk if working_dir / executable
        # are attacker-controlled; confirm they are validated upstream.
        submit_line = "chmod 777 %s%s" % (working_dir, job_desc["executable"].split("/")[-1])
        submit = subprocess.Popen(
            (["sudo", "su", "root", "-c", submit_line]),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT
        )
        s_out, s_err = submit.communicate()
        # Now start a thread for each of these jobs
        try:
            for inst, job_id in db_jobs.iteritems():
                instance_type = inst
                params = inst_params[instance_type]
                profiler_thread = ProfilerJobThread(config_file, job_id, inst, params, job_desc)
                profiler_thread.start()
        except Exception, e:
            print "Error with a thread %s" % e
def run(self):
    """Show the DB Manager dialog, creating it lazily so only one instance
    ever exists; un-minimize and focus it when already open."""
    # keep opened only one instance
    if self.dlg is None:
        from db_manager import DBManager
        self.dlg = DBManager(self.iface)
        # Reset self.dlg when the dialog is destroyed so it can be recreated.
        QObject.connect(self.dlg, SIGNAL("destroyed(QObject *)"), self.onDestroyed)
    self.dlg.show()
    self.dlg.raise_()
    # Clear the minimized bit so the window actually comes to the front.
    self.dlg.setWindowState(
        self.dlg.windowState() & ~Qt.WindowMinimized
    )
    self.dlg.activateWindow()
def __init__(self, db_file='crawler.db'):
    """
    :param db_file: The database file name to be saved
    """
    self._url_scanner = UrlScanner()
    self._db_manager = DBManager(db_file)
    self._internal_urls_to_scan = []   # same-domain frontier
    self._external_urls_to_scan = []   # off-domain frontier
    # Depth limits / domain are set later by crawl().
    self._max_page_depth = None
    self._max_external_sites_page_depth = None
    self._domain = None
class DBManagerPlugin:
    """QGIS plugin wrapper (legacy variant): adds the DB Manager action to
    the Database menu and keeps at most one dialog instance alive.

    IDIOM FIX: None comparisons changed from `== None` / `!= None` to
    identity checks (`is None` / `is not None`) — behavior unchanged for
    plain attributes, and it is the correct Python idiom.
    """

    def __init__(self, iface):
        self.iface = iface
        self.dlg = None  # lazily created DB Manager dialog

    def initGui(self):
        self.action = QAction(QIcon(), u"DB Manager", self.iface.mainWindow())
        QObject.connect(self.action, SIGNAL("triggered()"), self.run)
        self.iface.addPluginToDatabaseMenu(u"DB Manager", self.action)

    def unload(self):
        self.iface.removePluginDatabaseMenu(u"DB Manager", self.action)
        if self.dlg is not None:
            self.dlg.close()

    def run(self):
        # keep opened only one instance
        if self.dlg is None:
            from db_manager import DBManager
            self.dlg = DBManager(self.iface, self.iface.mainWindow())
            # Reset self.dlg when the dialog is destroyed (see onDestroyed).
            QObject.connect(self.dlg, SIGNAL("destroyed(QObject *)"), self.onDestroyed)
        self.dlg.show()
        self.dlg.raise_()
        self.dlg.activateWindow()

    def onDestroyed(self, obj):
        self.dlg = None
class GameCLI:
    """Command-line driver for the "Do you even math?" quiz game."""

    def __init__(self):
        self.db = DBManager()   # player/score persistence
        self.player_name = ''   # set during game_start_up()

    def game_start_up(self):
        """Print the welcome banner and register the player in the DB."""
        first_msg = 'Welcome to the \"Do you even math?\" game!\n'
        first_msg += 'Here are your options:\n- start\n- highscores\n- quit\n'
        print(first_msg)
        player_name = input('Enter your playername> ')
        self.player_name = player_name
        self.db.add_player(player_name)
        print('Welcome {}! Let the game begin!'.format(player_name))

    def answer_questions(self):
        """Ask questions until the first wrong answer, then record the score.

        NOTE(review): the score is counter * counter (quadratic reward for
        long streaks) — presumably intentional; confirm with the game rules.
        """
        counter = 0
        while True:
            question = Question()
            print('Question #{}:'.format(counter + 1))
            print(question.text)
            answer = input('?> ')
            if answer == str(question.correct_answer):
                print('Correct!')
                counter += 1
            else:
                score = counter * counter
                msg = 'Incorrect! Ending game. '
                msg += 'Your score is: {}'.format(score)
                print(msg)
                # Only persist the score if it beats the stored best.
                if self.db.is_score_better(self.player_name, score):
                    self.db.update_score(self.player_name, score)
                return

    def main_loop(self):
        """Top-level REPL: start a game, list highscores, or quit."""
        self.game_start_up()
        while True:
            command = input('?> ')
            if command == 'start':
                self.answer_questions()
                break
            elif command == 'highscores':
                print(self.db.get_highscores())
            elif command == 'quit':
                print('Bye!')
                break
            else:
                print('Invalid command!')
import sys import argparse import textwrap from db_manager import DBManager from services import LanguageServices # ==================================================================================== # Language Detection v1.0.0 # # This is the Language Detection Module using Language Profile Rank Order Distance. # ==================================================================================== if __name__ == '__main__': # Setup the database. DBManager.setup() # Configure the parser. parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent("""\ Language Detection v1.0 ============================================================================= These are the available commands. train_file = Train the language detector with the data from the file. The file name will become the language key. train_folder = Train the language detector with the data from the folder. The file names will become the language keys. test = Test the string with the language models. get_n_grams = Retrieve a list of n-grams. get_languages = Get a list of languages which can be detected. """)) parser.add_argument('command', choices = ["train_file", \
def __init__(self, *args, **kwargs):
    """Set up a fresh test database: drop the Url table and rebuild schema."""
    super(TestBase, self).__init__(*args, **kwargs)
    manager = DBManager('test.db')
    self._db_manager = manager
    # Start from a clean slate so tests do not see each other's rows.
    Url.__table__.drop(manager.engine)
    manager._create_tables()
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding('utf8') import urllib, json, os from db_manager import DBManager from decorators import async db = DBManager() def get_travel_location(address): address = urllib.quote(address) url = 'http://api.map.baidu.com/geocoder/v2/?address=%s&output=json&ak=FD09ed40290e289709ffe50b546c3a23' % address r=urllib.urlopen(url).read() rlt = json.loads(r) return rlt def insert_travel_location(jingdian_id,province,place,name): print str(jingdian_id),str(province),str(place),str(name),'...' try: address = str(province+place+name)#str(place+name) if str(name).find(str(place)) < 0 else str(name) rlt = get_travel_location(address) if rlt['status'] == 0: location_lng = rlt['result']['location']['lng'] location_lat = rlt['result']['location']['lat'] sql = '''INSERT INTO dim_travel_location(`jingdian_id`,`search_value`,`location_lng`,`location_lat`) VALUES(%s,'%s','%s','%s')''' % (str(jingdian_id),address,location_lng,location_lat) db.executeNonQuery(sql) else: f = open('error.txt','a')
def __init__(self):
    """Create the persistence layer and start with no player selected."""
    self.player_name = ''
    self.db = DBManager()
def __init__(self):
    # Obtain both database handles from the shared DBManager facade.
    # The session is requested before the connection — NOTE(review):
    # confirm with DBManager whether this call order matters (e.g. if
    # get_connection reuses state set up by get_session).
    self.session = DBManager.get_session()
    self.connection = DBManager.get_connection()
def setUp(self):
    """Point each test at the on-disk test database."""
    test_db_file = 'test.db'
    self.db_manager = DBManager(test_db_file)
__author__ = 'Michal'

import time
import re
from os import listdir, getcwd, path
from nltk.corpus import stopwords
from nltk.stem.snowball import SnowballStemmer
from data_loader import load_filenames, filter_feats
from db_manager import DBManager
from config import DIR_FILES, DIR_CLEANED_FILES
from file_utils import create_dir

# Module-level setup: ensure the database exists before any cleaning runs.
manager = DBManager()
manager.create()

# Shared English stemmer used by the cleaning pipeline.
stemmer = SnowballStemmer("english")
VERBOSE = True


def clean_word(w):
    """
    Delete all the redundant information encoded in a word.
    :param w: word to be cleaned
    :return: cleaned word
    """
    # Sentence terminators are kept as-is:
    if w in [".", "?", "!"]:
        # will be needed for ngrams to find out where a sentence finishes.
        return w
    w = w.lower()
    # Strip every non-alphabetic character.
    w = re.sub('[^A-Za-z]+', '', w)
    # NOTE(review): the function appears to continue past this chunk — no
    # final `return w` is visible here; confirm against the full file.
import os
import sqlite3
from flask import Flask, jsonify, request, render_template, redirect, url_for
from db_manager import DBManager

# Application setup: the SQLite file and its schema script live in the
# Flask config so DBManager can locate them.
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update({
    'DATABASE': os.path.join(app.root_path, 'db.sqlite'),
    'DATABASE_SCHEME': os.path.join(app.root_path, 'scheme.sql'),
})
db = DBManager(app)

# Views


@app.route("/")
def index():
    """Landing page: forward straight to the create form."""
    return redirect(url_for('create'))


@app.route("/create/")
def create():
    """Render the record-creation form."""
    return render_template('create.html')


@app.route("/find/")
def find():
    """Render the search form."""
    return render_template('find.html')