def getId(table, val):
    """Look up the primary-key id of a row in `users` or `tags`.

    Args:
        table: 'users' (matched on username) or 'tags' (matched on tag_name).
        val: the username / tag name used as the SQL parameter.

    Returns:
        The id value, or None when the table name is unknown, no row
        matches, or the query fails.
    """
    # Choose the query before connecting so an unknown table name does not
    # leak an open connection (the original connected first and returned
    # early without closing).
    if table == 'users':
        sql = '''select id from users where username = %s'''
    elif table == 'tags':
        sql = '''select id from tags where tag_name = %s'''
    else:
        return None
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    try:
        with db.cursor() as cur:
            cur.execute(sql, val)
            data = cur.fetchone()
        return data[0] if data is not None else None
    except Exception:
        utils.PrintException()
        return None
    finally:
        # Release the connection on every path.
        db.close()
def getData(table, val):
    """Fetch one row from `comments` or `comments_votes`.

    Args:
        table: 'comments' (keyed by author_text, permlink) or
            'comments_votes' (keyed by user_id, comment_id).
        val: 2-tuple of key values for the chosen table.

    Returns:
        The matching row tuple (or None if no match), or None when the
        table name is unknown or the query fails.
    """
    # Validate the table name before opening a connection so an unknown
    # table cannot leak an open connection.
    if table == 'comments':
        sql = '''select * from comments where author_text = %s and permlink = %s limit 1'''
    elif table == 'comments_votes':
        sql = '''select * from comments_votes where user_id = %s and comment_id = %s limit 1'''
    else:
        return None
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    try:
        with db.cursor() as cur:
            cur.execute(sql, val)
            data = cur.fetchone()
        return data
    except Exception:
        utils.PrintException()
        return None
    finally:
        db.close()
def updateCount(undo_id):
    """Increment the retry counter of an undo_op row.

    Called when processing the op failed or was skipped, so the row can
    age out once `count` passes the configured limit.
    """
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    print('get_in_update_count: ', undo_id)
    sql = 'update undo_op set count = count + 1 where id = %s'
    try:
        # `with` guarantees the cursor is closed; the original's except
        # branch called cur.close() on a name that could be unbound.
        with db.cursor() as cur:
            cur.execute(sql, undo_id)
        db.commit()
        print('update_count_success: ', undo_id)
    except Exception:
        print('update_count_failed: ', undo_id)
        db.rollback()
        utils.PrintException(undo_id)
    finally:
        db.close()
def __init__(self):
    """Instantiate the object detector named in the YAML configuration.

    Reads 'object_detector_config.yaml', resolves the configured dotted
    module path, and constructs the detector with its own config section.

    Raises:
        ImportError: if the configured module path cannot be resolved.
    """
    config = utils.get_config('object_detector_config.yaml')
    module_name = config['object_detector_module']
    object_detector_config = config['object_detector_config']
    ObjectDetectorClass = locate(module_name)
    # locate() returns None for an unresolvable path; fail loudly here
    # instead of with an opaque "'NoneType' object is not callable" later.
    if ObjectDetectorClass is None:
        raise ImportError(
            'object_detector_module %r could not be located' % module_name)
    self.object_detector = ObjectDetectorClass(object_detector_config)
def single_model_main():
    """Run the single-model PLR pipeline end to end.

    Selects the best feature combination, searches for the best model,
    persists both, then classifies the first sample as a smoke test.
    """
    config_file = "D:/src/AutismDetection/AutismDetection/docs/plr_config.json"
    exs_conf, output_conf = utils.get_config(config_file)

    # Load the full feature table and the significance-analysis metadata.
    feature_table = pd.read_csv(output_conf['all_features'])
    sig_info = pd.read_csv(
        output_conf['significant_difference_features_info'])
    sig_names = sig_info['name'].tolist()

    # Grid search + naive bayes pick the best feature combination.
    combo_scores, best_combo = find_best_features_combination(
        feature_table, sig_names)
    combo_scores.to_csv(output_conf["best_features_combination"], index=False)

    # Search candidate models on the chosen features and persist the winner.
    model_scores, _, best_models = find_best_model(feature_table, best_combo)
    model_scores.to_csv(output_conf["scores_models"], index=False)
    save_model(best_models, output_conf["best_model"])

    # Round-trip the saved model and classify the first sample.
    trained = load_model(output_conf["best_model"])
    pred = predict(sample=feature_table.iloc[0, :], model=trained)
    if pred == 1:
        print("Autism Spectrum Disorder High Risky!!!")
    elif pred == 0:
        print("Typical Development.")
def prepare_best_features(config_file):
    """Load the feature table plus the previously-selected best combination.

    Args:
        config_file: path to the JSON pipeline configuration.

    Returns:
        Tuple of (features dataframe, significant feature name list,
        best feature combination list, output configuration dict).
    """
    exs_conf, output_conf = utils.get_config(config_file)
    # All extracted features and the significance-analysis results.
    feature_table = pd.read_csv(output_conf['all_features'])
    sig_info = pd.read_csv(
        output_conf['significant_difference_features_info'])
    sig_names = sig_info['name'].tolist()
    # The winning combination lives from column 5 onward of the first row.
    combo_df = pd.read_csv(output_conf["best_features_combination"])
    best_combo = combo_df.iloc[0, 5:].dropna().tolist()
    return feature_table, sig_names, best_combo, output_conf
def load_configs(self):
    """Read ros_config.yaml and cache the sections this node uses."""
    cfg = utils.get_config('ros_config.yaml')
    self.ros_config = cfg
    # Topic wiring and the frame-subsampling setting.
    self.subscribers = cfg['subscribers']
    self.publishers = cfg['publishers']
    self.subsampling = cfg['subsampling']
    rospy.loginfo("ROS configs loaded successfully")
def __init__(self, **kwargs):
    """Build the main editor window: stylesheet, code widget, actions,
    and persisted state/settings."""
    super().__init__(None, **kwargs)
    self.setWindowTitle('YandexEditor')
    self.kwargs = kwargs
    self.arg_size = kwargs['size']  # window size requested by the caller
    # Apply the application-wide stylesheet.
    with open('style.qss') as f:
        self.setStyleSheet(f.read())
    self.code_widget = CodeEditor(self)  # TODO: Replace with QScintilla
    # `launching` suppresses editor change-handlers during setup; cleared
    # again at the end of this constructor.
    self.code_widget.launching = True
    self.code_widget.setObjectName('codeWidget')
    # Position the editor just below the window icon/title area.
    self.code_widget.move(8, self.window_icon.height())
    self.code_widget.kwargs = self.kwargs
    # Wire menu/toolbar actions to the editor widget.
    self.open_action.triggered.connect(self.code_widget.open_file)
    self.new_action.triggered.connect(self.code_widget.new_file)
    self.save_action.triggered.connect(
        lambda: self.code_widget.save(agreed=True))
    self.settings_action.triggered.connect(self.show_settings)
    self.run_action.triggered.connect(self.code_widget.run_script)
    # Restore persisted window state, then user settings.
    self.config = utils.get_config()
    self.restore_state()
    self.settings = utils.get_settings()
    self.settings_w = SettingsWindow(self, self.settings, **self.kwargs)
    self.restore_settings()
    self.code_widget.launching = False
def updateCheckPoint(check_point):
    """Persist the scan check point into the config table.

    Returns:
        True when the update commits, False when it fails (rolled back).
    """
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    sql = '''update config set val = %s where param = "check_point"'''
    try:
        with db.cursor() as cursor:
            cursor.execute(sql, (check_point, ))
        db.commit()
        return True
    except Exception as e:
        db.rollback()
        # The original printed the undefined name `insert_data` here, which
        # raised NameError inside the handler and hid the real failure; it
        # also reused the "insert block cache" message from saveToMysql.
        print('[warning]update check point error', e, sql, check_point)
        return False
    finally:
        db.close()
def saveToMysql(insert_data):
    """Bulk-insert block rows into block_cache.

    Args:
        insert_data: sequence of (block_num, previous, block_id,
            block_info, timestamp) tuples.

    Returns:
        True when the batch commits, False when it is rolled back.
    """
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    sql = ''' insert into `block_cache` ( `block_num`, `previous`, `block_id`, `block_info`, `timestamp` ) values (%s, %s, %s, %s, %s)'''
    try:
        with db.cursor() as cursor:
            cursor.executemany(sql, insert_data)
        db.commit()
        return True
    except Exception as e:
        db.rollback()
        print('[warning]insert block cache error', e, sql, insert_data)
        return False
    finally:
        # Original leaked the connection if cursor creation itself raised.
        db.close()
def cli(task, config, auto, duration, wait, max_click, skip, close, cashout, solo, cron):
    """Dispatch loop for the automation tasks.

    CLI arguments fall back to config values via get_config_value (each
    branch resolves only the options it uses). Runs forever: Ctrl-C exits,
    any other error sleeps 10 minutes and retries.
    """
    config = get_config(config=config)
    if task is None:
        # No explicit task on the command line: take it from the config.
        task = config.get('task')
    while True:
        try:
            if task == 'empty':
                from apps.empty import EmptyTask
                e = EmptyTask(config=config)
                e.run()
            elif task == 'koinme':
                from apps.koinme import Koinme
                auto = get_config_value(auto, 'auto', config, _default=False)
                k = Koinme(config=config)
                k.koinme(auto=auto)
            elif task == 'ameb':
                from apps.ameb import Ameb
                cron = get_config_value(cron, 'cron', config, _default=False)
                duration = get_config_value(duration, 'duration', config, _default=None)
                wait = get_config_value(wait, 'wait', config, _default=None)
                a = Ameb(config=config)
                a.ameb(cron=cron, duration=duration, wait=wait)
            elif task == 'am_emu':
                from apps.ameb import Ameb
                max_click = get_config_value(max_click, 'max_click', config, _default=None)
                duration = get_config_value(duration, 'duration', config, _default=None)
                skip = get_config_value(skip, 'skip', config, _default=None)
                close = get_config_value(close, 'close', config, _default=False)
                # NOTE: cashout defaults True here but False for ameb_emu below.
                cashout = get_config_value(cashout, 'cashout', config, _default=True)
                a = Ameb(config=config)
                a.am_emu(max_click=max_click, duration=duration, skip=skip, close=close, cashout=cashout)
            elif task == 'eb_emu':
                from apps.ameb import Ameb
                solo = get_config_value(solo, 'solo', config, _default=True)
                close = get_config_value(close, 'close', config, _default=False)
                cron = get_config_value(cron, 'cron', config, _default=False)
                duration = get_config_value(duration, 'duration', config, _default=None)
                a = Ameb(config=config)
                a.eb_emu(solo=solo, close=close, cron=cron, duration=duration)
            elif task == 'ameb_emu':
                from apps.ameb import Ameb
                max_click = get_config_value(max_click, 'max_click', config, _default=None)
                duration = get_config_value(duration, 'duration', config, _default=None)
                skip = get_config_value(skip, 'skip', config, _default=None)
                close = get_config_value(close, 'close', config, _default=False)
                cashout = get_config_value(cashout, 'cashout', config, _default=False)
                a = Ameb(config=config)
                a.ameb_emu(max_click=max_click, duration=duration, skip=skip, close=close, cashout=cashout)
            elif task == 'bing':
                from apps.bing import BingTask
                close = get_config_value(close, 'close', config, _default=True)
                a = BingTask(config)
                a.run(close=close)
        except KeyboardInterrupt:
            break
        except Exception as e:
            # Deliberate best-effort: swallow transient failures, back off,
            # and retry the whole task on the next iteration.
            # print(e)
            # raise
            time.sleep(60 * 10)
def load_config(self):
    """Read the darknet detector config and the class-name list."""
    cfg = utils.get_config('darknet_object_detector_config.yaml')
    self.darknet_config = cfg
    self.classes = None
    # The names file lists one class label per line.
    names_path = utils.get_abs_path(cfg['classes_file'])
    with open(names_path, 'rt') as names_file:
        self.classes = names_file.read().rstrip('\n').split('\n')
def parseVote(val):
    """Process one undo_op row carrying a 'vote' operation.

    `val` is an undo_op row unpacked by position: (id, block_num,
    trans_id, op_idx, op_json, task_type, count, block_time). Inserts or
    updates the vote on the referenced comment, or bumps the row's retry
    counter when the voter/comment has not been synced yet.
    """
    config = utils.get_config()
    undo_count = config['undo_count']
    # Positional unpack of the undo_op row (see docstring for layout).
    undo_id = val[0]
    block_num = val[1]
    trans_id = val[2]
    op_idx = val[3]
    op = json.loads(val[4])
    task_type = val[5]
    block_time = val[7]
    current_count = val[6]
    try:
        op_type = op[0]
        op_detail = op[1]
        print('parse_vote:', undo_id)
        if op_type == 'vote':
            print('get_in_vote')
            # Negative weight means a downvote; store magnitude + direction.
            weight = op_detail['weight']
            if weight >= 0:
                updown = True
            else:
                weight = (-1) * weight
                updown = False
            voter_id = getId('users', op_detail['voter'])
            if voter_id == None:
                # Voter row not synced yet: defer via the retry counter.
                print('not_found_voter_id', undo_id)
                return updateCount(undo_id)
            comment = getData('comments',
                              (op_detail['author'], op_detail['permlink']))
            if comment == None:
                # Comment row not synced yet: defer via the retry counter.
                print('not_found_comment', block_num, trans_id, op_idx)
                return updateCount(undo_id)
            else:
                # vote to comment
                vote = getData('comments_votes', (voter_id, comment[0]))
                if vote != None:
                    # edit vote: keep the original created_at (vote[5]).
                    return updateData('comments_votes', vote[0], undo_id, (
                        comment[0], voter_id, weight, updown, vote[5],
                        block_time))
                else:
                    # insert comment vote
                    return insertData('comments_votes', undo_id, ((
                        comment[0], voter_id, weight, updown, block_time,
                        block_time), ))
    except Exception as e:
        utils.PrintException(undo_id)
        return updateCount(undo_id)
def updateData(table, old_id, undo_id, val):
    """Apply an edit to a row and drop its undo_op entry in one transaction.

    Args:
        table: 'comments' or 'comments_votes'.
        old_id: id of the row being updated.
        undo_id: id of the undo_op row to delete on success.
        val: tuple of new column values matching the chosen table's SQL.

    Returns:
        True on commit, False on failure (rolled back, retry counter
        bumped), None for an unknown table.
    """
    # Validate the table before connecting; the id is now a bound %s
    # parameter instead of being string-formatted into the SQL.
    if table == 'comments':
        sql = '''update comments set permlink = %s, title = %s, body = %s, json_metadata = %s, parent_permlink = %s, created_at = %s, updated_at = %s, is_del = %s, parent_author_text = %s, author_text = %s where id = %s'''
    elif table == 'comments_votes':
        sql = '''update comments_votes set comment_id = %s, user_id = %s, weight = %s, updown = %s, created_at = %s, updated_at = %s where id = %s'''
    else:
        return None
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    remove_undo_op_sql = '''delete from undo_op where id = %s'''
    try:
        with db.cursor() as cur:
            # update data, then remove the undo_op row atomically.
            cur.execute(sql, tuple(val) + (old_id, ))
            cur.execute(remove_undo_op_sql, undo_id)
        db.commit()
        return True
    except Exception:
        db.rollback()
        updateCount(undo_id)
        utils.PrintException(undo_id)
        return False
    finally:
        db.close()
def mainMultiProcess():
    """Poll for tasks of `task_type` forever, fanning each slice out to a
    fresh process pool, then sleeping briefly between polls."""
    global task_type
    config = utils.get_config()
    while True:
        all_tasks = tasks.splitTasks(tasks.get(task_type), config['slice_step'])
        if all_tasks:
            # The context manager guarantees shutdown (which waits for all
            # submitted work, matching the original p.shutdown()) even if a
            # submit call raises.
            with ProcessPoolExecutor(config['worker']) as pool:
                for t in all_tasks:
                    pool.submit(processor, t)
        time.sleep(3)
def main():
    """Housekeeping loop: prune multi_tasks and block_cache rows that every
    task type has fully finished, then sleep five minutes."""
    while True:
        config = utils.get_config()
        db_c = config['steem_config']
        db = pymysql.connect(host=db_c['host'],
                             port=db_c['port'],
                             user=db_c['user'],
                             password=db_c['pass'],
                             charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor,
                             db=db_c['db'],
                             autocommit=False)
        last_block_nums = []
        for task_type in tasks.task_type.values():
            # Newest two finished tasks per type; the most recent one is
            # kept, pruning only up to the second-newest.
            sql = '''select * from multi_tasks where task_type = %s and is_finished = 1 order by block_num_to desc limit 2'''
            with db.cursor() as cur:
                cur.execute(sql, (task_type, ))
                res = cur.fetchall()
                # leave one finished task
                if len(res) <= 1:
                    print('task_type', task_type, 'has_not_finished')
                    # 0 forces min() below to 0 so nothing is pruned this round.
                    last_block_nums.append(0)
                else:
                    print('add', res[1]['block_num_to'], 'of task_type', task_type)
                    last_block_nums.append(res[1]['block_num_to'])
        # Prune only up to the block number every task type has passed.
        block_num = min(last_block_nums)
        if block_num > 0:
            print('will_remove_block_num', block_num)
            #remove multi_tasks
            sql = '''delete from multi_tasks where block_num_to <= %s'''
            with db.cursor() as cur:
                cur.execute(sql, (block_num, ))
            #remove block_cache
            sql = '''delete from block_cache where block_num <= %s'''
            with db.cursor() as cur:
                cur.execute(sql, (block_num, ))
        try:
            print('commit_task')
            db.commit()
        except:
            print('rollback')
            db.rollback()
        db.close()
        print('sleep')
        time.sleep(60 * 5)
def options():
    """Parse the NeuralCrossbreed training CLI and load the YAML config.

    Returns:
        (args namespace, config dict).
    """
    import argparse
    arg_parser = argparse.ArgumentParser(description='NeuralCrossbreed Arguments')
    arg_parser.add_argument('-c', '--config', type=str, default='./config.yaml', help='e.g.> --config=\'./config.yaml\'')
    arg_parser.add_argument('-n', '--ngpu', type=int, help='e.g.> --ngpu=1', default=None)
    arg_parser.add_argument('-g', '--gpu_1st', type=int, help='e.g.> --gpu_1st=0', default=0)
    # parser.add_argument('-m','--metric', action='store_true', help='if specified, use AFHQ dataset for quantitative evaluation)
    parsed = arg_parser.parse_args()
    print(parsed)
    return parsed, get_config(parsed.config)
def load_configs(self, config_file):
    """Load darknet thresholds, network input size, and model file paths."""
    cfg = utils.get_config(config_file)
    self.darknet_config = cfg
    # Detection thresholds.
    self.conf_threshold = cfg['conf_threshold']
    self.nms_threshold = cfg['nms_threshold']
    # Network input dimensions.
    self.inp_width = cfg['inp_width']
    self.inp_height = cfg['inp_height']
    # Resolve model files relative to the project root.
    self.model_configuration = utils.get_abs_path(cfg['model_configuration'])
    self.model_weights = utils.get_abs_path(cfg['model_weights'])
    print("ML Configs loaded successfully")
def init(): global ak, sk, channel, handler # js -> python channel = QWebChannel() handler = CallHandler() channel.registerObject('handler', handler) web_view.page().setWebChannel(channel) # js -> python # 检查AccessKey和SecretKey ak, sk, _ = get_config() if ak is None: web_view.page().runJavaScript('show_setting_dialog();') return web_view.page().runJavaScript('set_keys("%s", "%s");' % (ak, sk))
def download_url(self, url):
    """Queue a download for `url`.

    Returns the string "True" when the download was started, or "False"
    when a task for this URL already exists.
    """
    global download_task
    # Reject duplicates: one task per URL.
    if any(entry['url'] == url for entry in download_task):
        return "False"
    # Local path = configured save directory + URL basename.
    file_name = url.split("/")[-1]
    _, _, save_dir = get_config()
    save_file = save_dir + file_name
    # Register the task, then start the transfer.
    download_task.append({"url": url, "file": save_file, "status": 0})
    download_file(url, save_file)
    return "True"
def main(): config_file = "D:/src/AutismDetection/AutismDetection/docs/plr_config.json" # used changed by user _, output_conf = utils.get_config(config_file) features, labels = read_features(feature_path=output_conf['all_features'], all_features=None) significant_difference_features_info, significant_difference_feature_list \ = significant_difference_analysis(features, labels) significant_difference_features_info.to_csv( output_conf['significant_difference_features_info']) corr = correlation_analysis(features, significant_difference_feature_list) significant_difference_features, significant_difference_feature_list = significant_difference_reduction( features, labels)
def delData(table, old_id, undo_id):
    """Soft-delete a row and remove its undo_op entry in one transaction.

    Args:
        table: 'posts'/'comments' (sets is_del = 1 on the row) or
            'undo_op' (only removes the undo_op row itself).
        old_id: id of the row to soft-delete.
        undo_id: id of the undo_op row to delete.

    Returns:
        True on commit, False on failure (rolled back, retry counter
        bumped), None for an unknown table.
    """
    # Validate the table before connecting (no leak on early return).
    if table == 'posts':
        sql = '''update posts set is_del = 1 where id = %s'''
    elif table == 'comments':
        sql = '''update comments set is_del = 1 where id = %s'''
    elif table == 'undo_op':
        sql = None  # nothing to soft-delete; only drop the undo_op row
    else:
        return None
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    remove_undo_op_sql = '''delete from undo_op where id = %s'''
    try:
        with db.cursor() as cur:
            if sql is not None:
                cur.execute(sql, old_id)
            cur.execute(remove_undo_op_sql, undo_id)
        db.commit()
        return True
    except Exception:
        db.rollback()
        updateCount(undo_id)
        utils.PrintException(undo_id)
        # The original fell through returning None here; return False to
        # match updateData's contract.
        return False
    finally:
        db.close()
def generate_tasks(block_from, block_to, conn, user_t):
    """Slice [block_from, block_to] into block_step-sized task rows and
    insert them into multi_tasks.

    Args:
        block_from / block_to: inclusive block-number range to cover.
        conn: open DB connection (committed here).
        user_t: task_type value stored with each row.

    Returns:
        The list of inserted (task_type, from, to, is_finished) tuples.
    """
    config = utils.get_config()
    step = config['block_step']
    data = []
    start = block_from
    while start <= block_to:
        # Clamp the slice end to the overall range.
        end = min(start + step, block_to)
        data.append((user_t, start, end, 0))
        start = end + 1
    sql = ''' insert into multi_tasks (task_type, block_num_from, block_num_to, is_finished) values (%s, %s, %s, %s)'''
    with conn.cursor() as cur:
        cur.executemany(sql, data)
    conn.commit()
    return data
def main():
    """Evaluate a trained model and report RocAUC for the configured mode."""
    cli_args = parser.parse_args()
    cfg = get_config(cli_args.config)
    vgg, model = get_networks(cfg, load_checkpoint=True)
    if cfg['localization_test']:
        # Pixel-level localization evaluation.
        loader, gt = load_localization_data(cfg)
        auc = localization_test(model=model, vgg=vgg, test_dataloader=loader,
                                ground_truth=gt, config=cfg)
    else:
        # Image-level detection evaluation.
        _, loader = load_data(cfg)
        auc = detection_test(model=model, vgg=vgg, test_dataloader=loader,
                             config=cfg)
    print("RocAUC after {} epoch:".format(cfg['last_checkpoint']), auc)
def mainMultiProcess():
    """Forever: fetch pending undo ops, process them with a pool of worker
    threads, then sleep `undo_sleep` seconds between rounds."""
    config = utils.get_config()
    undo_sleep = config['undo_sleep']
    undo_thread_count = config['undo_thread_count']
    while True:
        # get undo op
        data = getUndoTasks()
        task_queue = queue.Queue()
        if data:
            for tmp_tasks in data:
                task_queue.put(tmp_tasks)
        # Fresh daemon workers each round; daemon=True replaces the
        # deprecated setDaemon() so they die with the process.
        for _ in range(undo_thread_count):
            worker = threading.Thread(target=processor, args=(task_queue, ),
                                      daemon=True)
            worker.start()
        # Block until the workers have marked every queued item done
        # (returns immediately when the queue is empty).
        task_queue.join()
        print('get_in_sleep')
        time.sleep(undo_sleep)
def __init__(self, config_file, input_filename=None, output_filename_base=None, output_directory=None):
    """Set up the generator: config, locations, NLP model, articles, output.

    Args:
        config_file: configuration path; its keys become instance attributes.
        input_filename: optional articles source passed to _load_articles.
        output_filename_base: optional basename for the output files.
        output_directory: optional output dir (defaults to OUTPUT_DIRECTORY).
    """
    # create parameters
    # NOTE(review): every config key becomes an attribute; `self.model`,
    # `self.keyword` and `self.country` used below are assumed to come
    # from the config file — confirm they are always present.
    self.__dict__.update(utils.get_config(config_file))
    # Load list of locations
    self.locations_df = self._load_locations()
    # load spacy model
    logger.info("Loading model {}".format(self.model))
    self.nlp = spacy.load(self.model)
    # get df of articles
    self.articles_df = self._load_articles(input_filename)
    # get keywords
    self.keywords = ImpactTableGenerator._get_keywords(config_file)
    # prepare output: derive a default filename from keyword + country.
    if output_filename_base is None:
        output_filename_base = 'impact_data_{keyword}_{country}'.format(
            keyword=self.keyword, country=self.country)
    self.output_filename_base = output_filename_base
    if output_directory is None:
        self.output_directory = OUTPUT_DIRECTORY
    else:
        self.output_directory = output_directory
    if not os.path.exists(self.output_directory):
        os.makedirs(self.output_directory)
    self.writer = ExcelWriter(
        os.path.join(self.output_directory,
                     self.output_filename_base + '.xlsx'))
    self.df_impact = ImpactTableGenerator._make_df_impact()
def mainMultiProcess():
    """Main loop for base tasks: fetch a batch, fan it out to worker
    threads, reset the check-point accumulator, sleep, repeat."""
    global check_point_sum
    try:
        config = utils.get_config()
        base_sleep = config['base_sleep']
        base_thread_count = config['base_thread_count']
        # Graceful shutdown on Ctrl-C / SIGTERM.
        signal.signal(signal.SIGINT, quit)
        signal.signal(signal.SIGTERM, quit)
        while True:
            all_tasks = BaseTasks.get()
            print('all_tasks_count', len(all_tasks))
            if not all_tasks:
                print('no_tasks')
                # Back off instead of busy-spinning: the original
                # `continue`d immediately with no sleep, pegging a core
                # whenever the task table was empty.
                time.sleep(base_sleep)
                continue
            task_queue = queue.Queue()
            for tmp_tasks in all_tasks:
                task_queue.put(tmp_tasks)
            # daemon=True replaces the deprecated setDaemon() call.
            for _ in range(base_thread_count):
                worker = threading.Thread(target=processor,
                                          args=(task_queue, ), daemon=True)
                worker.start()
            task_queue.join()  # suspend until all tasks are marked done
            check_point_sum = 0
            time.sleep(base_sleep)
    except Exception:
        utils.PrintException()
        sys.exit()
def getUndoTasks():
    """Fetch up to undo_limit pending undo_op rows with count <= undo_count.

    Returns:
        Tuple of undo_op rows ordered by id ascending (empty when none).
    """
    config = utils.get_config()
    db_c = config['steem_config']
    db = pymysql.connect(
        host=db_c['host'],
        port=db_c['port'],
        user=db_c['user'],
        password=db_c['pass'],
        charset='utf8mb4',
        db=db_c['db'],
        autocommit=False)
    undo_count = config['undo_count']
    undo_limit = config['undo_limit']
    # get undo op
    sql = '''select * from undo_op where count <= %s order by id asc limit %s'''
    try:
        with db.cursor() as cur:
            cur.execute(sql, (undo_count, undo_limit))
            data = cur.fetchall()
        return data
    finally:
        # Close even when the query raises (the original leaked the
        # connection on any execute failure).
        db.close()
def options():
    """Parse the inference CLI, load the config, and build the global
    image-preprocessing transform.

    Returns:
        (args namespace, config dict). Side effect: sets the module-level
        `trans` transform.
    """
    import argparse
    arg_parser = argparse.ArgumentParser(description='NeuralCrossbreed Arguments')
    arg_parser.add_argument('-c', '--config', type=str, default='./config.yaml', help='e.g.> --config=\'./config.yaml\'')
    arg_parser.add_argument('-n', '--ngpu', type=int, help='e.g.> --ngpu=1', default=None)
    arg_parser.add_argument('-g', '--gpu_1st', type=int, help='e.g.> --gpu_1st=0', default=0)
    arg_parser.add_argument('-xa', '--input_a', type=str, default='./sample_images/dog5.png', help='e.g.> --input_a=\'./sample_images/dog2.png\'')
    arg_parser.add_argument('-xb', '--input_b', type=str, default='./sample_images/dog3.png', help='e.g.> --input_b=\'./sample_images/dog2.png\'')
    arg_parser.add_argument('-i', '--niter', type=int, help='e.g.> --niter=5', default=5)
    arg_parser.add_argument('-d', '--disentangled', action='store_true')
    arg_parser.add_argument('-t', '--tau', type=float, help='e.g.> --tau=0.3', default=0.3)
    args = arg_parser.parse_args()
    print(args)
    config = get_config(args.config)

    # Square resize + center crop to the configured size, then map
    # pixel values to [-1, 1].
    global trans
    side = config['img_size']
    trans = transforms.Compose([
        transforms.Resize(side),
        transforms.CenterCrop(side),
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
    ])
    return args, config
import logging
from flask import Flask, jsonify
from flask_cors import cross_origin
from flask_restplus import Resource, Api
from werkzeug.exceptions import HTTPException, default_exceptions
from models.v1.indicators.indicator import api as indicator_api
from models.v1.assets.asset import api as asset_api
from models.v1.asset_groups.asset_group import api as asset_group_api
from models.v1.services.service import api as service_api
from utils.utils import get_config

logger = logging.getLogger(__name__)

# Application-wide configuration, loaded once at import time.
CONFIG = get_config()

app = Flask(__name__)
api = Api(app, doc=False)  # doc=False disables the swagger UI page

# Mount the versioned resource namespaces.
api.add_namespace(indicator_api)
api.add_namespace(asset_api)
api.add_namespace(asset_group_api)
api.add_namespace(service_api)


@api.route("/status")
class status(Resource):
    # Simple liveness endpoint.
    @api.doc("a klingon test/status endpoint")
    def get(self):
        body = {"message": "Qapla'!"}
        return jsonify(body)


# NOTE: the decorated handler function continues beyond this chunk.
@api.errorhandler(HTTPException)