def get_version():
    root_path = util.get_root_path()
    url = root_path + "/conf/stp.version"
    try:
        with open(url, "r") as version_file:
            version = version_file.read()
    except FileNotFoundError:
        logger.error("get version failed. can not find version file.\n %s" % traceback.format_exc())
        raise STPHTTPException("can not find file: stp.version", 503, 10002)
    return version
def merge_log_to_missnp(output_file):
    """
    Takes in a .log file and a .missnp file and merges them together,
    writing a separate output file with the contents of both.

    :param output_file: The output path prefix (the value of the output flag)
        passed in on the command line.
    :return: The name of the merged file, or an empty string if neither the
        .log file nor the .missnp file is present.
    :rtype: str
    """
    output_file_root_dir = util.get_root_path(output_file)
    output_file_name = util.get_filename(output_file)
    input_logfile = '{}.log'.format(output_file)
    input_missnp = '{}-merge.missnp'.format(output_file)
    merged_missnp_output = '{}_{}'.format(output_file, 'MERGED_LOG_MISSNP.txt')
    merged_missnp_output_lines = list()
    missing_logfile = False
    missing_missnp_file = False

    try:
        with open(input_missnp, 'r') as missnp:
            merged_missnp_output_lines += missnp.readlines()
    except FileNotFoundError:
        missing_missnp_file = True
        print('.missnp file [ {} ] does not exist. Excluding from merge...'.format(input_missnp))

    try:
        with open(input_logfile, 'r') as logfile_in:
            for line in logfile_in:
                if line.startswith('Warning:'):
                    rs_id = re.search('rs[0-9]+', line)  # regular expression to grab rsIDs
                    if rs_id:
                        rs_id = rs_id.group(0).strip('\n').strip("'.")
                        merged_missnp_output_lines.append(rs_id + '\n')  # append to the merged list
    except FileNotFoundError:
        print('Log file [ {} ] does not exist.'.format(input_logfile))
        missing_logfile = True

    if (missing_missnp_file and missing_logfile) or len(merged_missnp_output_lines) < 1:
        return ''

    with open(merged_missnp_output, 'w+') as merged_output:
        for line in merged_missnp_output_lines:
            merged_output.write(line)
    return merged_missnp_output
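# A minimal usage sketch for merge_log_to_missnp, assuming a prior PLINK merge
# produced "study-merge.missnp" and "study.log" next to the "study" output
# prefix; the prefix name is hypothetical, not taken from the original code.
merged = merge_log_to_missnp('study')
if merged:
    print('Merged exclusion list written to {}'.format(merged))
else:
    print('Nothing to merge: no .log or .missnp input found.')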
def __init__(self):
    self.root = get_root_path()
    self.tasks = {}
    self.done = {}
    filemap = (('tasks', 'tasks'), ('done', 'done'))
    for kind, filename in filemap:
        path = os.path.join(self.root, filename)
        if os.path.exists(path):
            with open(path, 'r') as task_file:
                task_lines = [tl.strip() for tl in task_file if tl]
                tasks = map(task_from_taskline, task_lines)
                for task in tasks:
                    if task is not None:
                        getattr(self, kind)[task['id']] = task
def init():
    import db.line
    from db.user import init_user
    from db.station import init_station
    from db.relation import init_relation
    from db.line import init_line
    from db.verify_code import init_verify_code

    platform_conf_path = util.get_root_path() + "/conf/platform.conf"
    database_conf_path = util.get_root_path() + "/conf/database.conf"
    deploy_conf = configparser.ConfigParser()
    deploy_conf.read([platform_conf_path, database_conf_path])

    # init database
    db_name = deploy_conf.get("deploy", "name")
    init_database(db_name)

    # init table
    init_user()
    init_line()
    init_verify_code()
    init_station()
    init_relation()

    lines = db.line.line_list()
    return lines
def base_engine():
    root_path = util.get_root_path()
    url = root_path + "/conf/database.conf"
    conf = configparser.ConfigParser()
    conf.read(url)
    username = conf.get("database", "username")
    password = conf.get("database", "password")
    engine = mysql.connector.connect(host="localhost",
                                     user=username,
                                     password=password,
                                     auth_plugin='mysql_native_password')
    cursor = engine.cursor()
    return engine, cursor
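# Hedged usage sketch: base_engine() returns an open connection and cursor, so
# the caller is responsible for committing and closing both. The query below is
# only an illustration and is not part of the original module.
engine, cursor = base_engine()
try:
    cursor.execute("show databases")
    print([row[0] for row in cursor.fetchall()])
finally:
    cursor.close()
    engine.close()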
def init_user():
    engine, cursor = db.engine.get_engine()
    sql = """
        create table if not exists user(
            uuid char(27) not null,
            email varchar(30),
            username varchar(24) default "subway user",
            password varchar(18) not null,
            token char(10) not null,
            user_type enum("admin", "user") not null default "user",
            create_time datetime not null,
            status enum("active", "down", "lock") not null default "active",
            image varchar(100),
            primary key(email)
        ) charset utf8
    """
    cursor.execute(sql)
    logger.info("Setup user finished.")

    # add admin user
    conf_path = util.get_root_path() + "/conf/platform.conf"
    deploy_conf = ConfigParser()
    deploy_conf.read([conf_path])
    uuid = util.generate_uuid()
    admin_user = deploy_conf.get("deploy", "admin_user")
    admin_pwd = deploy_conf.get("deploy", "admin_pwd")
    email = deploy_conf.get("deploy", "admin_email")
    image = "/root/image/localhost/default.jpeg"
    try:
        get_user_detail(email)
    except DBError:
        now = util.get_time_string_format()
        token = util.general_token()
        user_type = "admin"
        sql = "insert into user " \
              "values(%s, %s, %s, %s, %s, %s, %s, default, %s)"
        val = (uuid, email, admin_user, admin_pwd, token, user_type, now, image)
        cursor.execute(sql, val)
        logger.info("Init default admin user success.")
    engine.commit()
    engine.close()
    return
def discover_testsuites(paths=[]):
    '''
    returns dictionary mapping name to python file for all testsuites
    discovered in the usual places: kvarq root path, user home directory,
    current working directory, KVARQ_TESTSUITES environment variable, and
    any more paths specified as arguments -- later occurrences of the same
    testsuite override previous ones
    '''
    testsuite_paths = {}

    # 1) discover in root path
    root_base = os.path.abspath(os.path.join(get_root_path(), 'testsuites'))
    lo.debug('discovering testsuites in root path')
    add_testsuites_dir(testsuite_paths, root_base)

    # 2) discover from $HOME
    base = os.path.join(expanduser('~'), 'kvarq_testsuites')
    lo.debug('discovering testsuites in home directory')
    add_testsuites_dir(testsuite_paths, base)

    # 3) discover from CWD if not in root path
    cwd_base = os.path.abspath('testsuites')
    if cwd_base != root_base:
        lo.debug('discovering testsuites in current working directory')
        add_testsuites_dir(testsuite_paths, cwd_base)

    # 4) discover from KVARQ_TESTSUITES
    from_env = os.environ.get('KVARQ_TESTSUITES')
    if from_env:
        lo.debug('discovering testsuites in $KVARQ_TESTSUITES')
        for base in from_env.split(os.path.pathsep):
            add_testsuites_dir(testsuite_paths, base)

    # 5) explicitly specified paths
    for base in paths:
        if os.path.isdir(base):
            lo.debug('discovering testsuites in "%s"' % base)
            add_testsuites_dir(testsuite_paths, base)
        else:
            lo.warning('could not find directory "%s"' % base)

    return testsuite_paths
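# Usage sketch: gather testsuites from the default locations plus one extra
# project directory and list what was found. The extra path is a hypothetical
# example, not part of the original code.
suites = discover_testsuites(['/opt/myproject/testsuites'])
for name, path in sorted(suites.items()):
    print('testsuite {} -> {}'.format(name, path))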
def get_rsIDs_from_dataset(dataset, num_rsids=0):
    """
    Removes '.' entries from the binary file input.

    :param dataset: The path to the .bed/.bim/.fam files whose '.' rsIDs are
        to be removed.
    :param num_rsids: The number of rsIDs to extract from the dataset
        (0 means all).
    :rtype: str
    """
    root_path = util.get_root_path(dataset)
    dataset_filename = util.get_filename(dataset)
    dataset_bim = util.get_bed_bim_fam_from_bfile(dataset)['bim']
    temp_extract_file = 'extract_{}.txt'.format(dataset_filename)
    output_file = '{}{}_{}'.format(root_path, dataset_filename, 'RS_ONLY')
    output_lines = set()

    with open(dataset_bim, 'r') as input_file:
        file_lines = input_file.readlines()
    if num_rsids > 0:
        file_lines = file_lines[:num_rsids]
    for line in file_lines:
        if '.' not in line and not line.startswith('MT'):
            rs_id = re.search('rs[0-9]+', line)
            if rs_id:
                rs_id = rs_id.group(0).strip()
                output_lines.add(rs_id + '\n')

    with open(temp_extract_file, 'w+') as output:
        for line in output_lines:
            output.write(line)

    get_rs_ids_command = {
        'bfile': dataset,
        'extract': temp_extract_file,
        'out': output_file
    }
    util.call_plink(
        get_rs_ids_command,
        command_key='Get only rsIDs from input .bim file [ {} ]'.format(dataset_filename))
    # os.remove(temp_extract_file)
    return output_file
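# Usage sketch, assuming a PLINK binary fileset "cohort.bed/.bim/.fam" exists
# under data/ (the path and limit are hypothetical). The returned value is the
# output prefix of the rsID-only fileset written via util.call_plink.
rs_only_prefix = get_rsIDs_from_dataset('data/cohort', num_rsids=1000)
print('rsID-only dataset written with prefix: {}'.format(rs_only_prefix))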
def clean_bim(bimfile_input, snp_ref):
    """
    "Cleans" a .bim file by swapping SNP IDs for their matching rsIDs using
    the reference file.

    :param bimfile_input: The .bim file to be cleaned.
    :param snp_ref: The SNP reference file that converts SNP IDs to rsIDs.
    """
    root_directory = util.get_root_path(bimfile_input)
    snp_dict = dict()
    if snp_ref is not None:
        with open(snp_ref, 'r') as snp_ref_r:
            for line in snp_ref_r:
                if '#' in line:  # skip the header row
                    continue
                row = line.split(',')
                snp_dict[row[1]] = row[2]  # add snpID and rsID to dictionary

    with open(bimfile_input, 'r') as bimfile_input_r:
        bimfile_lines = bimfile_input_r.readlines()

    # file to write out snpIDs that don't have an rsID
    good_snpID_output_file = open('snpID.txt', 'a')
    for line in bimfile_lines:
        split_line = line.split('\t')  # split by tabs
        if not split_line[1].startswith('rs'):  # if not an rs id
            if (split_line[1] in snp_dict and snp_dict[split_line[1]] != '---') \
                    or split_line[1] not in snp_dict:
                good_snpID_output_file.write(split_line[1] + '\n')
            else:
                # replace snpID with rsID
                split_line[1] = snp_dict[split_line[1]]
    # https://stackoverflow.com/questions/7395542/is-explicitly-closing-files-important
    good_snpID_output_file.close()
from datetime import datetime
from typing import Any, Dict, List, Optional

# Config and data accessors
config_accessor = ConfigAccessor.get_instance()
db_accessor = DbAccessor.get_instance()

# Debug mode
debug = False  # config_accessor.log_mode == 'DEBUG'

# Global variable to control the system's mode
mode = 'NORMAL'  # NORMAL/CAUTION
owner = 'LeLong'

relative_path = '../ESP32_CAM/images'
IMG_DIR = os.path.join(get_root_path(), relative_path)
try:
    os.makedirs(IMG_DIR)
except OSError:
    pass

# Face recognizer
face_recognizer = FaceRecognizer()


class CautionThread(threading.Thread):
    def __init__(self, mqtt, sleep_interval=1):
        super().__init__()
        self._kill = threading.Event()
        self._interval = sleep_interval
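    # The snippet above is cut off after __init__; a minimal sketch of the
    # usual pattern for such a kill-event thread follows. The run()/kill()
    # bodies here are assumptions for illustration, not the original code:
    # loop until the event is set, waking every self._interval seconds, and
    # expose kill() so other code can request a clean shutdown.
    def run(self):
        while not self._kill.wait(self._interval):
            pass  # periodic caution-mode work would go here

    def kill(self):
        self._kill.set()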
def img_process_lab():
    if request.method == 'GET':
        messages = []
        types = ImgType.objects.exclude('id')
        for each in types:
            sub_types = ImgOperation.objects(type__name=each.name).exclude('id', 'type')
            messages.append({'type': each.name, 'subType': list(sub_types)})
        return jsonify({'result': 1, 'message': messages})
    else:
        request_body = json.loads(request.get_data())
        operation = request_body['operations'][0]
        o_code = operation['code']
        o_params = operation['params']
        image_base64 = request_body['image']
        try:
            img_name = base64_to_img_file(image_base64)
        except IOError as e:
            app.logger.exception(e)
            return jsonify({'result': 0, 'message': 'Image upload failed, please try again or use another image'})
        img_log = None
        img_arr = cv2.imread(os.path.join(get_root_path(), img_name), cv2.IMREAD_COLOR)
        if img_arr is None:
            return jsonify({'result': 0, 'message': 'Insufficient server space, please contact QQ: 644306737'})
        try:
            if int(o_code) < 200:
                processed_img_arr = smooth_manager.process(o_code, o_params, img_arr)
            elif int(o_code) < 300:
                thresh, processed_img_arr = segmentation_manager.process(o_code, o_params, img_arr)
                img_log = dict({'str': 'Optimal threshold is {}'.format(thresh)})
            elif int(o_code) < 400:
                processed_img_arr = contour_manager.process(o_code, o_params, img_arr)
            elif int(o_code) < 500:
                processed_img_arr = corner_manager.process(o_code, o_params, img_arr)
            elif int(o_code) < 600:
                processed_img_arr = morphology_manager.process(o_code, o_params, img_arr)
            else:
                return jsonify({'result': 0, 'message': 'This method is not implemented yet, please stay tuned'})
        except Exception as e:
            app.logger.exception(e)
            return jsonify({
                'result': 0,
                'message': 'Processing failed: {}, please contact QQ: 644306737'.format(e)
            })
        try:
            processed_img_name = img_arr_to_img_file(img_name, o_code, processed_img_arr)
        except IOError as e:
            app.logger.exception(e)
            return jsonify({'result': 0, 'message': 'Insufficient server space, please contact QQ: 644306737'})
        try:
            base64_data = img_file_to_base64(processed_img_name)
        except Exception as e:
            app.logger.exception(e)
            return jsonify({
                'result': 0,
                'message': 'Image conversion failed, please try again or contact QQ: 644306737'
            })
        if img_log is None:
            message = dict({'image': base64_data})
        else:
            message = dict({'image': base64_data, 'log': img_log})
        delete_files([img_name, processed_img_name])
        return jsonify({'result': 1, 'message': message})
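# Client-side sketch (assumed, not part of the original code): POST a base64
# image with one operation to this view and decode the processed result. The
# endpoint URL and the operation code '101' are hypothetical placeholders.
import base64
import requests

with open('input.jpg', 'rb') as f:
    payload = {
        'image': base64.b64encode(f.read()).decode('ascii'),
        'operations': [{'code': '101', 'params': {}}],
    }
resp = requests.post('http://localhost:5000/img_process_lab', json=payload).json()
if resp['result'] == 1:
    with open('output.jpg', 'wb') as out:
        out.write(base64.b64decode(resp['message']['image']))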
""" import flask import flask_mail import configparser import db import logs import conductor import server_init import util import compute from api import logger from errors.service import StartException conf = configparser.ConfigParser() config_path = util.get_root_path() + "/conf/platform.conf" conf.read(config_path) host = conf.get("system", "host") port = conf.get("system", "port") stp_app = flask.Flask("SubwayTraffic") stp_app.config.update( MAIL_SERVER="smtp.qq.com", MAIL_PORT=465, MAIL_USE_SSL=True, MAIL_USERNAME=conf.get("deploy", "admin_email"), MAIL_PASSWORD=conf.get("deploy", "admin_email_pwd"), ) stp_email = flask_mail.Mail(stp_app) server_init.register(stp_app)
def __init__(self):
    dotenv.load_dotenv(self._dotenv_file)
    self._yaml_file = os.path.join(get_root_path(), self._yaml_file)
    self._yaml_dict = self._load_yaml()