def download():
    """Download the latest version of spammers.txt."""
    utils.update_db()
    # NOTE: Flask >= 2.0 renamed attachment_filename to download_name.
    return send_file(
        os.path.join(utils.basename, "git_dir/spammers.txt"),
        as_attachment=True,
        attachment_filename="spammers.txt")
def batch_running(ip, notebook_root='../notebook'):
    # Collect the ids of all notebooks that already have result rows.
    in_result = []
    cursor, db = create_connection()
    cursor.execute('SELECT notebook_id FROM result')
    for row in cursor.fetchall():
        in_result.append(int(row[0]))

    # Parameterized query instead of string concatenation; the %s
    # placeholder assumes a MySQL-style DBAPI driver (e.g. pymysql) --
    # use '?' for sqlite3.
    cursor.execute(
        'SELECT id FROM notebook WHERE add_run=1 AND server_ip=%s', (ip,))
    sql_res = cursor.fetchall()

    total = 0
    can_use = 0
    can_use_1 = 0
    for row in sql_res:
        notebook_id = int(row[0])
        if notebook_id not in in_result:
            continue
        try:
            origin_code = get_code_txt(
                notebook_root + '/' + str(notebook_id) + '.ipynb')
        except Exception as e:
            print(e)
            return "read fail"
        origin_code, add, result = add_result(notebook_id, origin_code)

        if len(result) == 0:
            can_use += 1
            update_db("notebook", "add_model", '1', 'id', "=", notebook_id)
            update_db("result", "model_type", "'unknown'",
                      'notebook_id', "=", notebook_id)
        elif len(result) == 1:
            can_use += 1
            update_db("notebook", "add_model", '1', 'id', "=", notebook_id)
            cursor.execute(
                'UPDATE result SET model_type=%s WHERE notebook_id=%s',
                (list(result)[0], notebook_id))
            db.commit()  # the original never committed this UPDATE
        else:
            # More than one model type: drop the ambiguous result rows.
            print(result)
            update_db("notebook", "add_model", '2', 'id', "=", notebook_id)
            cursor.execute('DELETE FROM result WHERE notebook_id=%s',
                           (notebook_id,))
            db.commit()
            print('delete id:' + str(notebook_id))
            can_use_1 += 1
        total += 1

    print('1:', can_use)
    print('2:', can_use_1)
    print('all:', total)
    if total:  # guard against division by zero when nothing matched
        print('rate:', can_use / total)
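# A minimal sketch of the generic update_db(table, column, value, key, op,
# key_value) helper that batch_running relies on. The real helper is
# project-local and not shown here, so this body is an assumption. Note
# that the call sites above pass some values pre-quoted (e.g. "'unknown'"),
# which suggests the real helper splices values into the SQL string; this
# sketch instead binds values as parameters and whitelists the identifiers.
ALLOWED_TABLES = {"notebook", "result"}                       # hypothetical
ALLOWED_COLUMNS = {"add_model", "model_type", "id", "notebook_id"}

def update_db(table, column, value, key, op, key_value):
    if table not in ALLOWED_TABLES or {column, key} - ALLOWED_COLUMNS:
        raise ValueError("unexpected identifier")
    if op not in ("=", "<", ">", "<=", ">="):
        raise ValueError("unexpected operator")
    cursor, db = create_connection()
    # Identifiers come from the whitelist above; values travel as parameters.
    sql = "UPDATE {} SET {}=%s WHERE {} {} %s".format(table, column, key, op)
    cursor.execute(sql, (value, key_value))
    db.commit()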
def refresh_db(self):
    global db
    db = []
    # open() replaces the Python 2 builtin file(), which no longer exists.
    with open(utils.user_local_db + 'artifacts.json') as f:
        artifacts = json.load(f)
    for curr in artifacts:
        entry = DBEntry(curr)
        db.append(entry)
        if entry.old_folder and os.path.exists('%s/%s' % (dir_name, entry.old_folder)):
            os.rename('%s/%s' % (dir_name, entry.old_folder),
                      '%s/%s' % (dir_name, entry.folder))
        target_folder = dir_name + '/' + entry.folder
        if utils.is_db_empty(dir_name, entry.name) and os.path.isdir(resolve_artifact_dir(entry)):
            (found, latest) = utils.legacy_resolve_version(dir_name, entry)
            if found:
                utils.update_db(dir_name, entry.name, latest)
            else:
                entry.needs_manual_update = True
        if not os.path.isdir(target_folder):
            entry.not_installed = True
        elif not utils.check_if_already_updated_with_delta(dir_name, entry.name, entry.version):
            # target_folder is known to exist here, so the original
            # "or not os.path.isdir(target_folder)" clause was redundant.
            entry.needs_update = True
def crawl_link(collection, url):
    # Skip crawling if the url was crawled within the last 24 hours.
    doc = collection.find_one({"link": url})
    if doc is not None and doc['isCrawled'] and \
            doc['lastCrawledDT'] > datetime.now() - timedelta(days=1):
        log.info(f'\tThe {url} is already crawled within last 24hr')
        return

    # Make the connection request; src_url is assumed to be defined at
    # module scope, since it is not a parameter of this function.
    try:
        req = requests.get(url)
    except OSError as exc:
        log.info(
            f"Error occurred with {url} at time {datetime.now()}."
            f" The following error occurred:\n{exc}\nSkipping...")
        return

    if req.status_code != 200:
        log.debug(f"\t\t\t{url} gave response code {req.status_code}")
        update_db(collection, url, src_url, req)
        return

    # Create an html file after a successful request.
    if 'text/html' in req.headers['content-type']:
        html_doc = req.text
        file_path, file_created_DT = create_file(
            html_doc, conf.file_dir, conf.path, 'utf-8',
            doc['filePath'] if doc else None)  # doc is None on first crawl
    else:
        log.info(' Not html.. ignored')
        return

    # Update the database information.
    update_db(collection, url, src_url, req, file_path, file_created_DT)

    # Return the html document for scraping purposes.
    return html_doc
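# A minimal sketch of what crawl_link's update_db helper might look like,
# assuming pymongo and the three fields the function reads back (isCrawled,
# lastCrawledDT, filePath). The real helper is not shown above; the other
# field names and the upsert behaviour are assumptions for illustration.
def update_db(collection, url, src_url, req,
              file_path=None, file_created_DT=None):
    collection.update_one(
        {"link": url},
        {"$set": {
            "srcLink": src_url,                  # hypothetical field name
            "isCrawled": file_path is not None,  # only True on a saved page
            "lastCrawledDT": datetime.now(),
            "responseStatus": req.status_code,   # hypothetical field name
            "filePath": file_path,
            "fileCreatedDT": file_created_DT,    # hypothetical field name
        }},
        upsert=True)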
def update_msg():
    id = request.form.get('id')
    username = request.form.get('username')
    password = request.form.get('password')
    sex = request.form.get('sex')
    age = request.form.get('age')
    phone = request.form.get('phone')
    email = request.form.get('email')
    role = request.form.get('role')
    tup = (password, int(sex), int(age), int(phone), email, int(role), int(id))
    print(tup)  # the original used the Python 2 print statement
    p = utils.update_db(tup)
    # user_list = ['password', 'sex', 'age', 'phone', 'email', 'role']
    if p == 0:
        user_tup = utils.select_user(username)
        return render_template('user_show.html', user=user_tup)
    # The original fell through and returned None, which Flask rejects;
    # a plain error response is assumed here as a minimal fix.
    return 'update failed', 500
def simulate_float(pop_sizes, gene_sizes, inter_size, generations, mut_params,
                   ev_alg=evolution.ev_basic, bact_fitness=None,
                   plant_fitness=None, generator=utils.gen_random,
                   init_seed=0, ev_seed=0, n_threads=1):
    Bacteria.mut_v = mut_params[0][0]
    Bacteria.mut_m = mut_params[0][1]
    Plant.mut_v = mut_params[1][0]
    Plant.mut_m = mut_params[1][1]
    dist_epochs = 100
    Bacteria.dist_epochs = dist_epochs
    Plant.dist_epochs = dist_epochs

    # Seed before generating so the initial populations are reproducible
    # (the original seeded after the loop, leaving generation unseeded).
    np.random.seed(init_seed)

    # Try to generate populations with the necessary distance between them.
    cnt = 0
    while True:
        cnt += 1
        bacteria_pop, plant_pop = generator(pop_sizes, gene_sizes)
        d = np.linalg.norm(bact_fitness.run(bacteria_pop[0].gene) -
                           plant_fitness.run(plant_pop[0].gene))
        if 1.26 < d < 1.36 or cnt > 100000:
            break

    if generator == utils.gen_same_bacteria:
        ancestral = copy.deepcopy(bacteria_pop[0])

    time_sum = 0
    np.random.seed(ev_seed)
    picname = os.path.join("pic", str(mut_params) + "_" + str(ev_seed))
    if not os.path.exists(picname):
        os.makedirs(picname)  # makedirs also creates the parent "pic" dir
    print(picname)
    probs_lines = inspect.getsource(evolution.get_probs)

    bact_db, plant_db = dict(), dict()
    t_bact_db, t_plant_db = dict(), dict()
    fout = open(os.path.join(picname, "statistics.txt"), "w")
    print("bact_speed", "plant_speed", "inter_sim", "bact_div", "plant_div",
          "bact_ent", "plant_ent", file=fout)
    print(np.linalg.norm(bact_fitness.run(bacteria_pop[0].gene) -
                         plant_fitness.run(plant_pop[0].gene)))
    dlog = open(os.path.join(picname, "log.txt"), "w")

    for epoch in range(generations):
        start_time = time.time()
        print(epoch)
        bact_db, plant_db = utils.update_db(bacteria_pop, plant_pop, bact_db,
                                            plant_db, epoch, dist_epochs)
        b_d, p_d = utils.normed_distance_db(bacteria_pop, plant_pop,
                                            bact_db, plant_db)
        print(b_d, file=fout, end=" ")
        print(p_d, file=fout, end=" ")
        bact_genes = np.array([bacteria.gene for bacteria in bacteria_pop])
        plant_genes = np.array([plant.gene for plant in plant_pop])
        t_bact_genes, t_bact_db = utils.update_t_db(bacteria_pop, t_bact_db,
                                                    bact_fitness, epoch)
        t_plant_genes, t_plant_db = utils.update_t_db(plant_pop, t_plant_db,
                                                      plant_fitness, epoch)
        t_b_probs, t_p_probs = utils.inter_labeling(t_bact_genes,
                                                    t_plant_genes, 12)
        b_probs = utils.intra_labeling(bact_genes, 12)
        p_probs = utils.intra_labeling(plant_genes, 12)
        inter_dist = utils.hell_dist(t_b_probs, t_p_probs)
        print(inter_dist, file=fout, end=" ")
        bact_div = utils.mc_diversity(bacteria_pop, 10000)
        plant_div = utils.mc_diversity(plant_pop, 10000)
        print(bact_div, file=fout, end=" ")
        print(plant_div, file=fout, end=" ")
        bact_ent = utils.pop_entropy(b_probs)
        plant_ent = utils.pop_entropy(p_probs)
        print(bact_ent, file=fout, end=" ")
        print(plant_ent, file=fout)
        utils.flush_file(fout)

        if inter_size == 2:
            utils.save_2d(t_bact_genes, t_plant_genes, epoch, picname,
                          str(gene_sizes) + " " + str(pop_sizes) + " " +
                          str(mut_params) + "\n" + probs_lines)

        bacteria_pop, plant_pop, mean_dist = ev_alg(bacteria_pop, plant_pop,
                                                    t_bact_genes,
                                                    t_plant_genes)

        # Every 1000 epochs, apply a random catastrophe to a random species.
        if epoch % 1000 == 0 and epoch != 0:
            orgType = np.random.randint(0, 2)
            funcType = np.random.randint(0, 2)
            funcTypes = [evolution.glob_disaster, evolution.local_surv]
            orgName = ["bacteria", "plant"]
            funcName = ["disaster", "local_surv"]
            print(epoch, funcName[funcType], orgName[orgType], file=dlog)
            if orgType == 0:
                bacteria_pop = funcTypes[funcType](bacteria_pop, bact_genes,
                                                   Bacteria)
            else:
                plant_pop = funcTypes[funcType](plant_pop, plant_genes,
                                                Plant)
            utils.flush_file(dlog)

        elapsed = time.time() - start_time
        time_sum += elapsed
        # Progress: percent complete and estimated remaining seconds.
        print(str(int((epoch + 1) / generations * 100)) + "%", end=" ")
        print("%0.3f" % (time_sum / (epoch + 1) * (generations - epoch - 1)))

    fout.close()
    dlog.close()
    t_bact_genes = np.array(
        [bact_fitness.run(bacteria.gene) for bacteria in bacteria_pop])
    t_plant_genes = np.array(
        [plant_fitness.run(plant.gene) for plant in plant_pop])
    return [np.array([bacteria.gene for bacteria in bacteria_pop]),
            t_bact_genes, t_plant_genes,
            np.array([plant.gene for plant in plant_pop])]
def main_oss_callback_db(ret_dict, code_msg):
    task = ret_dict["task"]
    callback_url = task["callback"]
    source = task["source"]

    file_local_path = ret_dict["file_local_path"]
    file_my_oss_path = ret_dict["file_oss_path"]
    file_local_dir, file_name = file_local_path.rsplit("/", 1)
    image_dir = os.path.join(file_local_dir, "image")
    im_path = os.path.join(image_dir, file_name)
    anno_dir = os.path.join(file_local_dir, "anno")
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)
    oss_download(im_path, file_my_oss_path)
    if not os.path.exists(anno_dir):
        os.makedirs(anno_dir)

    ret_backend = main_backend(image_dir, anno_dir, code=code_msg["code"],
                               message=code_msg["message"])
    now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

    if ret_backend is not None:
        # The "product_catelog" key spelling is kept as-is: downstream
        # consumers expect it.
        task.update({"product_catelog": ret_backend})
        task.update(code_msg)
        print("*" * 20, " Classifier Result: ", task)
        try:
            if callback_endpoint(callback_url, task):
                update_db(source, task["task_uuid"], task["uuid"],
                          task["url"], 0, 2, code_msg["message"], now)
            else:
                code_msg["message"] = "[Error]: no response from callback"
                update_db(source, task["task_uuid"], task["uuid"],
                          task["url"], 1, 2, code_msg["message"], now)
        except Exception as e:
            print(e)
        # del task["task_uuid"]
        return True
    else:
        task.update({"product_catelog": ""})
        # Set the failure code before reporting, so update_db logs the same
        # message that is sent to the callback (the original updated
        # code_msg only after it had already been used).
        code_msg.update({"code": 320201,
                         "message": "[Error]: no keyword this image"})
        task.update(code_msg)
        print("*" * 40, " Classifier Result: ", task)
        try:
            if callback_endpoint(callback_url, task):
                update_db(source, task["task_uuid"], task["uuid"],
                          task["url"], 1, 2, code_msg["message"], now)
            else:
                code_msg["message"] = "[Error]: no response from callback"
                update_db(source, task["task_uuid"], task["uuid"],
                          task["url"], 1, 2, code_msg["message"], now)
        except Exception as e:
            print(e)
        return False
# Current Date DataFrame => c_d_df
# (fragment: this block runs inside a loop over dates d and crypto keys;
# the try below reconstructs the opening that the original except implies)
try:
    c_d_df = df.loc[d].to_dict()
    # Orders
    if c_d_df["order_type"] == "buy" or c_d_df["order_type"] == "sell":
        # Current close
        close = c_d_df["close"]
        # BTC & ETH can be split
        close = divide_crypto(cryptos[key], ["BTC-USD", "ETH-USD"],
                              close, 100)
        # Update wallet, ledger & portfolio
        wallet, ledger, portfolio = update_db(
            wallet, ledger, portfolio, d, cryptos[key],
            c_d_df["order_type"], close)
except KeyError as e:
    print(f"KeyError -- {cryptos[key]} -- {e}")

# ROI
portfolio = undivide_crypto(["BTC-USD", "ETH-USD"], portfolio, 100)
portfolio_value, overall_value, roi = calculate_metrics(
    portfolio, df_list, cryptos, wallet, 5000)
display_results(cryptos, 5000, wallet, portfolio, portfolio_value,
                overall_value, roi)
generate_log(ledger)
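# A minimal sketch of the update_db helper the backtest fragment above
# calls. The real implementation is not shown; the bookkeeping below
# (all-in buys, full-position sells, and the ledger record layout) is an
# assumption made purely for illustration.
def update_db(wallet, ledger, portfolio, date, symbol, order_type, close):
    held = portfolio.get(symbol, 0.0)
    if order_type == "buy" and wallet > 0:
        units = wallet / close           # spend the whole wallet (assumed)
        portfolio[symbol] = held + units
        wallet = 0.0
    elif order_type == "sell" and held > 0:
        wallet += held * close           # liquidate the position (assumed)
        portfolio[symbol] = 0.0
    ledger.append({"date": date, "symbol": symbol, "order": order_type,
                   "price": close, "wallet": wallet})
    return wallet, ledger, portfolio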
help="update only movie database (by default does movie and tv)") parser.add_argument( "--tv", action="store_true", help="update only tv database (by default does movie and tv)") args = parser.parse_args() db_types = ['movie', 'tv'] if args.movie is True: db_types = ['movie'] elif args.tv is True: db_types = ['tv'] for db_type in db_types: df = pd.read_csv('db_{}.csv'.format(db_type), encoding='utf-8') df = update_db(df, search, db_type=db_type) with open("templates/{}.html".format(db_type)) as f: txt = f.read() soup = bs4.BeautifulSoup(txt, features='lxml') template = soup.find("div", class_="row") for i, item in enumerate(df.iloc): # print(item['Title']) tag = data2tag(soup, column_tag=item["Type"], img=item["Poster"], title=item['Title'], info="", year=item["Year"]) if i == 0:
def open_supply_version(self, e):
    dlg = wx.TextEntryDialog(
        self, 'Please enter the last version you installed',
        defaultValue='2.0')
    # Only record the version if the user confirmed the dialog; item is
    # assumed to be the currently selected artifact in the enclosing scope.
    if dlg.ShowModal() == wx.ID_OK:
        utils.update_db(dir_name, item.name, dlg.GetValue())
    dlg.Destroy()
    update_tree_view(self)
def update_item(self, item, dispose_dia=True):
    post_ga_event('update_item', 'artifact_%s' % item.name)
    try:
        if item.create_delta:
            for deltaitem in item.deltas:
                if not utils.check_if_already_updated_with_delta(dir_name, item.name, deltaitem.version):
                    log_dia_info('Updating file %s' % deltaitem.file)
                    retries = 0
                    nof_retries = 3
                    while retries < nof_retries:
                        utils.get_file(utils.resolve_delta_dir(item),
                                       utils.get_storage_location(deltaitem.file),
                                       deltaitem.file, item.name)
                        dia_tick()
                        if was_dia_cancelled():
                            log_dia_info('Cancelling...')
                            break
                        if item.dynamic_import:
                            utils.delete_folder(utils.user_file_cache_dyn)
                            utils.create_dir(utils.user_file_cache_dyn)
                            try:
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                utils.extract_file(utils.get_storage_location(deltaitem.file),
                                                   utils.user_file_cache_dyn, item, True)
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                dynamic_import.move_in_place(
                                    utils.user_file_cache + 'dyn/%s/' % item.folder,
                                    '%s/%s/' % (dir_name, item.folder))
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    update_tree_view(self)
                                    break
                                utils.update_db(dir_name, item.name, deltaitem.version)
                                utils.delete_folder(utils.user_file_cache + 'dyn/%s/' % item.folder)
                                item.needs_update = False
                                update_tree_view(self)
                                dia_tick()
                                break
                            except FtpOverloadedException:
                                post_ga_event('update_item_ftp_overload', 'artifact_%s' % deltaitem.file)
                                raise
                            except InvalidZipFileException:
                                post_ga_event('update_item_invalid_zip', 'artifact_%s' % deltaitem.file)
                                utils.delete_file(utils.get_storage_location(deltaitem.file))
                                if retries == nof_retries - 1:
                                    raise
                        else:
                            log_info('Unpacking %s into %s' % (item.name, dir_name))
                            try:
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                utils.extract_file(utils.get_storage_location(deltaitem.file),
                                                   dir_name, item, False)
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    update_tree_view(self)
                                    break
                                utils.update_db(dir_name, item.name, deltaitem.version)
                                target_folder = dir_name + '/' + item.folder
                                log_dia_info('Updated %s with deltafile %s at location %s'
                                             % (item.name, deltaitem.file, target_folder))
                                item.needs_update = False
                                update_tree_view(self)
                                dia_tick()
                                if utils.get_boolean_user_setting(delete_files_after_install):
                                    utils.delete_file(utils.get_storage_location(deltaitem.file))
                                break
                            except FtpOverloadedException:
                                post_ga_event('update_item_ftp_overload', 'artifact_%s' % deltaitem.file)
                                raise
                            except InvalidZipFileException:
                                post_ga_event('update_item_invalid_zip', 'artifact_%s' % deltaitem.file)
                                log_dia_info('Invalid delta zipfile, delete and retry')
                                utils.delete_file(utils.get_storage_location(deltaitem.file))
                                if retries == nof_retries - 1:
                                    raise
                        # Count the attempt for both branches so the retry
                        # limit actually takes effect.
                        retries += 1
        if dispose_dia:
            wx.CallAfter(dispose_dialog)
    except FtpOverloadedException:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail,
                         'Too many users right now, please try again later')
    except InvalidZipFileException as e:
        if dispose_dia:
            # str(e) replaces the Python 2-only e.message attribute.
            wx.CallAfter(dispose_dialog_fail, str(e))
    except Exception:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail,
                         'Unknown error %s:' % sys.exc_info()[0])
    finally:
        update_tree_view(self)
def install_item(self, current_item, dispose_dia=True):
    post_ga_event('install_item', 'artifact_%s' % current_item.name)
    folder = '%s/%s/' % (dir_name, current_item.folder)
    if not utils.check_if_already_updated_with_delta(dir_name, current_item.name, current_item.version) or not os.path.exists(folder):
        try:
            log_dia_info('Getting full entry %s' % current_item.name)
            retries = 0
            nof_retries = 3
            while retries < nof_retries:
                try:
                    utils.get_file(current_item.basedir,
                                   utils.get_storage_location(current_item.file),
                                   current_item.file, current_item.name)
                    if os.path.exists(folder):
                        log_dia_info('Deleting current folder %s (this may take a while, please be patient)' % folder)
                        utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
                    if was_dia_cancelled():
                        log_dia_info('Download of %s was cancelled' % current_item.name)
                        if dispose_dia:
                            wx.CallAfter(dispose_dialog)
                        return
                    log_dia_info('Downloaded %s' % current_item.name)
                    log_dia_info('Extracting files %s' % dir_name)
                    dia_tick()
                    utils.extract_file(utils.get_storage_location(current_item.file),
                                       dir_name, current_item, True)
                    dia_tick()
                    break
                except InvalidZipFileException:
                    post_ga_event('install_item_invalid_zip', 'artifact_%s' % current_item.name)
                    log_info('Invalid zipfile, delete and retry')
                    utils.delete_file(utils.get_storage_location(current_item.file))
                    if retries == nof_retries - 1:
                        raise
                retries += 1
            if utils.get_boolean_user_setting(delete_files_after_install):
                utils.delete_file(utils.get_storage_location(current_item.file))
            if was_dia_cancelled():
                if dispose_dia:
                    wx.CallAfter(dispose_dialog)
                return
            log_dia_info('Update db')
            utils.update_db(dir_name, current_item.name, current_item.version)
            current_item.not_installed = False
            log_dia_info('Done extracting full entry %s at location %s' % (current_item.name, dir_name))
            log_dia_info('Install done')
            if dispose_dia:
                wx.CallAfter(dispose_dialog)
        except InvalidZipFileException as e:
            utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
            log_dia_info('Install failed due to error during fetch or unzip')
            if dispose_dia:
                # str(e) replaces the Python 2-only e.message attribute.
                wx.CallAfter(dispose_dialog_fail, str(e))
        except FtpOverloadedException:
            log_dia_info('Too many users, please try again in a while')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail, 'Too many users, please try later')
        except Exception:
            utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
            log_dia_info('Install failed due to unknown error')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail, 'Unknown error %s:' % sys.exc_info()[0])
        finally:
            update_tree_view(self)
    else:
        if dispose_dia:
            wx.CallAfter(dispose_dialog)
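# The installer methods above (refresh_db, update_item, install_item) all
# share a utils.update_db(dir_name, name, version) /
# utils.check_if_already_updated_with_delta(dir_name, name, version) pair.
# The real utils module is not shown; this is a minimal sketch of one way
# such a version store could work, assuming a versions.json file inside the
# install directory (the file name and layout are assumptions).
import json
import os

def _db_path(dir_name):
    return os.path.join(dir_name, 'versions.json')

def update_db(dir_name, name, version):
    """Record that artifact `name` is now at `version`."""
    path = _db_path(dir_name)
    data = {}
    if os.path.exists(path):
        with open(path) as f:
            data = json.load(f)
    data[name] = version
    with open(path, 'w') as f:
        json.dump(data, f)

def check_if_already_updated_with_delta(dir_name, name, version):
    """Return True if the recorded version for `name` matches `version`."""
    path = _db_path(dir_name)
    if not os.path.exists(path):
        return False
    with open(path) as f:
        return json.load(f).get(name) == version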