def main(**kwargs):
    """CLI entry point: parse arguments, print version or run a search."""
    parser = argparse.ArgumentParser(
        prog=script_name,
        usage='{} KEY_WORD'.format(script_name),
        description='A tiny downloader that scrapes the web',
        add_help=True,
    )
    parser.add_argument('-v', '--version', action='store_true',
                        help='Print version and exit', required=False)
    parser.add_argument('KEY_WORD', nargs='*', type=str, help='search keyword')
    args = parser.parse_args()

    if args.version:
        print("%s: version %s" % (script_name, __version__))
        sys.exit()

    # No keyword given: show usage instead of searching.
    if not args.KEY_WORD:
        parser.print_help()
        return

    # The target site expects its query encoded as GBK.
    search({
        "searchword": args.KEY_WORD[0].encode('gbk'),
        "searchtype": "-1"
    })
def get(self):
    """Return the base64-encoded thumbnail image for a task.

    Query args:
        task_id (str, required): identifier of a previously uploaded file.

    Returns:
        200 with {task_id, file_type, imageb64} when the task exists,
        400 when the task_id is unknown.
    """
    parse = reqparse.RequestParser()
    parse.add_argument('task_id', type=str, required=True)
    args = parse.parse_args()
    task_id = args['task_id']
    # retrieve file type
    conn = MySQLdb.connect(self.db_host, self.db_user, self.db_passwd,
                           self.db_name)
    with conn:
        cursor = conn.cursor()
        cursor.execute("SELECT file_type from tasks where task_id =%s",
                       (task_id, ))
        conn.commit()
        dataset = cursor.fetchall()
        # this task_id is not existed.
        if cursor.rowcount == 0:
            response_packet = {
                "msg": 'Bad request.',
                "ret": HTTP_400_BAD_REQUEST,
                "data": {}
            }
            return make_response(jsonify(response_packet),
                                 HTTP_400_BAD_REQUEST)
        print('Total Row(s) fetched:', cursor.rowcount)
        for row in dataset:
            file_type = row[0]
            self.mFileType = file_type
        # return thumbnail picture for specific task_id
        filepath = UPLOAD_FOLDER + "/" + task_id + "_thumbnail." + self.mFileType
        print(filepath)
        with open(filepath, "rb") as image:
            # BUGFIX: b64encode() returns bytes, which jsonify cannot
            # serialize; decode to a UTF-8 str (consistent with the other
            # handlers in this service).
            image_b64encode_string = base64.b64encode(image.read()).decode('utf-8')
        # result here: dict obj
        response_packet = {
            "msg": 'Access webpage success.',
            "ret": HTTP_200_SUCCESS,
            "data": {
                "task_id": task_id,
                "file_type": self.mFileType,
                "imageb64": image_b64encode_string,
            }
        }
        return make_response(
            jsonify(response_packet),
            HTTP_200_SUCCESS)  # <- the status_code displayed code on console
def main():
    """Parse CLI options and launch the static-file HTTP server."""
    import argparse

    cli = argparse.ArgumentParser()
    cli.add_argument("--root", type=str, help="your http file root", default='.')
    cli.add_argument("--port", type=int, help="http socket port", default=80)
    opts = cli.parse_args()

    # Route Markdown files through the dedicated renderer.
    reg_ext_handler(".md", _md_handler)

    # On Windows, open the served address in the default browser.
    if platform.system() == 'Windows':
        os.startfile(f"http://localhost:{opts.port}")

    start(opts.root, opts.port)
def parse_args():
    """Parse command-line arguments for the URL scanner.

    Returns:
        argparse.Namespace with `host` (str), `s` (bool), `o` (bool)
        and `depth` (int) attributes.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('host', help='Hostname to scan')
    parser.add_argument('-s', help='Resolve the status for each URL',
                        action="store_true")
    parser.add_argument('-o', help='Include urls not at the host',
                        action="store_true")
    # BUGFIX: without type=int a CLI-supplied depth arrived as a str while
    # the default was the int 3; coerce so callers always get an int.
    parser.add_argument('--depth', type=int,
                        help='Depth of search. Default: 3', default=3)
    return parser.parse_args()
def DivideAPI():
    """Divide the router's fa0/0 into two sub-interfaces.

    Reads ip/username/password from the request and runs the
    Router.txt configuration script against the device.
    """
    req = reqparse.RequestParser()
    req.add_argument('ip', type=str, help='错误的ip', default='192.168.5.1')
    req.add_argument('username', type=str, help='错误的username', default='root')
    req.add_argument('password', type=str, help='错误的password', default='123456')
    params = req.parse_args()  # parse parameters from the front-end request

    router_ip = params.get('ip')
    username = params.get('username')
    router_password = params.get('password')

    # Run the configuration/verification script:
    # split Router fa0/0 into two sub-interfaces.
    flag, message = excute(router_ip, username, router_password,
                           './scripts/Router.txt', socketio)

    ret = ReturnJ()
    ret.data = {}
    ret.flag = flag
    ret.message = message
    return ret.toJson()
def VlanAPI():
    """Configure VLAN10 and VLAN20 on Switch1.

    Reads ip/username/password from the request and runs the
    Split_Vlan.txt configuration script against the device.
    """
    req = reqparse.RequestParser()
    req.add_argument('ip', type=str, help='错误的ip', default='192.168.5.5')
    req.add_argument('username', type=str, help='错误的username', default='root')
    req.add_argument('password', type=str, help='错误的password', default='CISCO')
    params = req.parse_args()  # parse parameters from the front-end request

    switch_ip = params.get('ip')
    username = params.get('username')
    switch_password = params.get('password')

    # Run the configuration/verification script:
    # create VLAN10 and VLAN20 on Switch1.
    flag, message = excute(switch_ip, username, switch_password,
                           './scripts/Split_Vlan.txt', socketio)

    ret = ReturnJ()
    ret.data = {}
    ret.flag = flag
    ret.message = message
    return ret.toJson()
def get(self):
    """Look up OCR data for (user_id, task_id, file_type) and wrap it as JSON."""
    req = reqparse.RequestParser()
    req.add_argument('user_id', type=int, required=True)
    req.add_argument('task_id', type=str, required=True)
    req.add_argument('file_type', type=str, required=True)
    params = req.parse_args()

    response_data = self.getInternal(params['user_id'], params['task_id'],
                                     params['file_type'])

    # An empty result means the internal lookup failed -> 400; else 200.
    if response_data == "":
        packet = {
            "msg": 'bad request.',
            "ret": HTTP_400_BAD_REQUEST,
            "data": {}
        }
        status = HTTP_400_BAD_REQUEST
    else:
        packet = {
            "msg": 'Access webpage success.',
            "ret": HTTP_200_SUCCESS,
            "data": response_data,
        }
        status = HTTP_200_SUCCESS
    # <- the status_code displayed code on console
    return make_response(jsonify(packet), status)
def post(self):
    """Handle a file upload: validate, store under its MD5 hash, create a
    thumbnail, and record the task in the database via an internal API.

    Form/file args:
        user_file (file, multipart): the uploaded image.
        user_id (int, form): owner of the upload.

    Returns:
        201 with {user_id, task_id} on success; 400 for a missing/empty/
        duplicate/disallowed file.
    """
    parse = reqparse.RequestParser()
    parse.add_argument('user_file', type=FileStorage, location='files')
    parse.add_argument('user_id', type=int, location='form')
    args = parse.parse_args()
    file = args['user_file']
    if file:
        # Reject uploads whose filename is empty.
        if file.filename == '':
            response_data = {
                "msg": 'Upload filename is null.',
                "ret": HTTP_400_BAD_REQUEST
            }
            return make_response(
                jsonify(response_data),
                HTTP_400_BAD_REQUEST
            )  # <- the status_code displayed code on console
        print("recieved a new file.")
        # allowed_file() returns (extension, is_allowed).
        fext, s = self.allowed_file(file.filename)
        if s:
            if not os.path.exists(UPLOAD_FOLDER):
                os.makedirs(UPLOAD_FOLDER)
            filename = secure_filename(file.filename)
            file.save(os.path.join(UPLOAD_FOLDER, filename))
            filename_with_path = os.path.join(UPLOAD_FOLDER, filename)
            # The file's MD5 digest becomes its task_id and canonical name.
            md5filename = self.md5(filename_with_path)
            md5filename_with_ext = md5filename + "." + fext
            md5thumbnail_with_ext = md5filename + "_thumbnail." + fext
            # record this file info in db.
            url = "http://localhost:5000/api/insert/record"
            values = {
                "user_id": args['user_id'],
                "task_id": md5filename,
                "file_type": fext
            }
            print("values=>", values)
            self.send_request(url, values)
            newfilename = os.path.join(UPLOAD_FOLDER, md5filename_with_ext)
            newfilename_thumbnail = os.path.join(UPLOAD_FOLDER, md5thumbnail_with_ext)
            # NOTE(review): the duplicate check below runs AFTER the DB
            # insert above, so a repeated upload still posts a record —
            # confirm this ordering is intended.
            if os.path.exists(newfilename):
                response_data = {
                    "msg": 'Please do not upload the file repeatedly.',
                    "ret": HTTP_400_BAD_REQUEST
                }
                return make_response(jsonify(response_data), HTTP_400_BAD_REQUEST)
            print("newfilename=>", newfilename)
            # Move the upload to its hash-based canonical name.
            os.rename(filename_with_path, newfilename)
            try:
                # Build a 320px-high thumbnail, preserving aspect ratio.
                baseheight = 320
                img = Image.open(newfilename)
                hpercent = (baseheight / float(img.size[1]))
                wsize = int((float(img.size[0]) * float(hpercent)))
                # NOTE(review): Image.ANTIALIAS was removed in Pillow 10
                # (renamed Image.LANCZOS) — verify the pinned Pillow version.
                img = img.resize((wsize, baseheight), Image.ANTIALIAS)
                img.save(newfilename_thumbnail)
            except IOError:
                # Thumbnail failure is non-fatal; the original file is kept.
                print("cannot create thumbnail for '%s'" % newfilename_thumbnail)
            #if not os.path.exists(PREPROC_FOLDER):
            #    os.makedirs(PREPROC_FOLDER)
            #destfile = os.path.join(PREPROC_FOLDER, md5filename_with_ext)
            #copyfile(newfilename, destfile)
            #self.preprocess_images(newfilename, preproc_filepath)
            response_data = {
                "data": {
                    "user_id": args['user_id'],
                    "task_id": md5filename,
                },
                "msg": 'Upload file successfully',
                "ret": HTTP_201_CREATED
            }
            return make_response(jsonify(response_data), HTTP_201_CREATED)
        else:
            response_data = {
                "msg": 'Upload is not allowed filetype.',
                "ret": HTTP_400_BAD_REQUEST
            }
            return make_response(jsonify(response_data), HTTP_400_BAD_REQUEST)
    else:
        response_data = {
            "status": 'user_file is invalid.',
            "status_code": HTTP_400_BAD_REQUEST
        }
        return make_response(jsonify(response_data), HTTP_400_BAD_REQUEST)
# NOTE(review): the statements below are the tail of a function whose `def`
# line lies before this chunk; indentation reconstructed from context.
outputfile = args.outputfile
if args.versionFile:
    # Conversion mode: map gene names between genome versions.
    targetgenes = VersionConvert(args)
else:
    # Filtering mode: keep SNPs/indels at the requested evidence levels.
    targetgenes = EvidenceLevel(evidence, queryvcf, outputfile)


if __name__ == "__main__":
    # Directory containing this script, interpolated into the usage text.
    scriptpath = os.path.split(os.path.realpath(__file__))[0]
    parse = argparse.ArgumentParser(formatter_class = HelpFormatter, description = '''
    Usage:
    python3 {scriptpath}/FilterSNP.py <args> <args>....
    NOTE:
    This script was used to filterout SNPs that failed to pass GATK criterion and with missing rate in population above threshold.
    '''.format(scriptpath = scriptpath))
    parse.add_argument('-query', '--queryvcf', required = True, dest = "queryvcf",
                       help = "vcf file without intergenic regions info",
                       metavar = "vcf file without intergenic regions info",
                       type = str, nargs = '?')
    parse.add_argument('-outfile', '--out', required = True, dest = "outputfile",
                       help = "outputfile record condidates SNP/Indel",
                       metavar = "outputfile record condidates SNP/Indel",
                       type = str, nargs = '?')
    parse.add_argument('-genenamefile', '--namefile', required = True, dest = "genefile",
                       help = "condidate genes with their corresponding names of another version",
                       type = str, nargs = '?')
    parse.add_argument('-logfile', '--logfile', required = True, dest = "logfile",
                       help = "Log file to record procedures of processing of this script",
                       metavar = "Log file to record procedures of processing of this script",
                       type = str, nargs = '?')
    parse.add_argument('-evidence', '--evidence', required = True, dest = "evidence",
                       default = "exonic downstream intronic splicing upstream UTR3 UTR5",
                       help = "Positon where SNP/indel located in will be further studied",
                       type = str, nargs = '*')
    # Optional group for genome-version name conversion.
    versiongroup = parse.add_argument_group("verion conversion")
    versiongroup.add_argument('-versionFile', '--versionFile', required = False, dest = "versionFile",
                              help = "Responding relationship between two versions",
                              type = str, nargs = '?')
    versiongroup.add_argument('-versionname', '--versionname', required = False, dest = "versionName",
                              help = "version name should be consistent with that in verionfile",
                              type = str, nargs = '?')
    args = parse.parse_args()
    main(args)
def get(self):
    """Run OCR for (user_id, task_id), caching the parsed result on disk.

    Validates user_id/task_id against the DB, returns a cached
    response.json when present, otherwise runs the image pipeline
    (post2) followed by either tesseract (`ocr_type == 'google'`) or the
    Baidu API (`ocr_type == 'baidu'`), and returns the recognition
    result plus base64-encoded ROI crops.

    Returns:
        200 with the recognition data; 400 on invalid input or when an
        internal OCR/pipeline error is raised.
    """
    try:
        parse = reqparse.RequestParser()
        parse.add_argument('task_id', type=str, required=True)
        parse.add_argument('user_id', type=int, required=True)
        args = parse.parse_args()
        task_id = args['task_id']
        user_id = args['user_id']
        # retrieve file type from task_id
        conn = MySQLdb.connect(self.db_host, self.db_user, self.db_passwd,
                               self.db_name)
        with conn:
            cursor = conn.cursor()
            cursor.execute("SELECT id from users where id = %s", (user_id, ))
            conn.commit()
            dataset = cursor.fetchall()
            if cursor.rowcount == 0:
                raise ValueError("invalid user_id:", user_id)
            cursor.execute("SELECT file_type from tasks where task_id =%s ",
                           (task_id, ))
            conn.commit()
            dataset = cursor.fetchall()
            # this task_id is not existed.
            if cursor.rowcount == 0:
                raise ValueError("task_id is not existed.")
            print('Total Row(s) fetched:', cursor.rowcount)
            for row in dataset:
                file_type = row[0]
                self.mFileType = file_type
            if task_id and user_id and file_type:
                # Attempt to retrieve info from the backend directory;
                # bypass post2 entirely if a cached result exists.
                sdir = os.path.join(RESULT_FOLDER, task_id)
                response_file = os.path.join(sdir, 'response.json')
                if os.path.exists(sdir) and os.path.exists(response_file):
                    with open(response_file, 'r') as file:
                        json_string = json.load(file)
                    response_packet = {
                        "msg": 'Success.',
                        "ret": HTTP_200_SUCCESS,
                        "data": json_string,
                    }
                    return make_response(
                        jsonify(response_packet),
                        HTTP_200_SUCCESS)  # <- the status_code displayed code on console
                res = self.post2(task_id, UPLOAD_FOLDER)
                if res['code'] == HTTP_400_BAD_REQUEST:
                    raise ValueError('bad request in post2 query')
                IMGDIR = os.path.join(RESULT_FOLDER, task_id, "step1")
                # do tesseract to recognize the docnumber and doctype
                print("CMD=>", TESS_CMD + " " + RESULT_FOLDER + "/" + task_id +
                      "/step1/roi-DocNumber.jpg" + " docnumres -l lancejie_fapiao3")
                if self.ocr_type == 'google':
                    os.system(TESS_CMD + " " + RESULT_FOLDER +
                              "/" + task_id + "/step1/roi-DocNumber.jpg" +
                              " docnumres -l lancejie_fapiao3")
                    os.system(TESS_CMD + " " + RESULT_FOLDER +
                              "/" + task_id + "/step1/roi-DocType.jpg" +
                              " doctyperes -l lancejie_shuipiao2")
                    with open("docnumres.txt") as file:
                        docnumres = file.read().rstrip()
                    print("docnumres=>", docnumres)
                    with open("doctyperes.txt") as file:
                        doctyperes = file.read().rstrip()
                    print("doctyperes=>", doctyperes)
                elif self.ocr_type == 'baidu':
                    fba = FetchBaiduApi.FetchBaiduApi(self.db_host,
                                                      self.db_user,
                                                      self.db_passwd,
                                                      self.db_name)
                    responseBaiduData = fba.getInternal(user_id, task_id, file_type)
                    if responseBaiduData == "":
                        if self.ocr_policy == 'tryout':
                            # Retry the Baidu lookup twice before giving up.
                            responseBaiduData = fba.getInternal(user_id, task_id, file_type)
                            if responseBaiduData == "":
                                responseBaiduData = fba.getInternal(user_id, task_id, file_type)
                                # BUGFIX: was `responseBaidudata` (lowercase d
                                # typo), which raised NameError instead of
                                # reporting the internal error.
                                if responseBaiduData == "":
                                    print("Internal error happened!")
                                    raise ValueError("Internal server error",
                                                     HTTP_407_INTERNAL_ERROR)
                        else:
                            # NOTE(review): original line was the no-op
                            # comparison `self.ocr_type=="google"`; an
                            # assignment fallback was probably intended, but
                            # flipping it would reach the google-only branch
                            # below with `docnumres` undefined — left as a
                            # no-op, confirm intent.
                            self.ocr_type == "google"
                    else:
                        print("response=>", responseBaiduData)
                with open(IMGDIR + "/roi-DocNumber.jpg", "rb") as image:
                    # base64 encode read data; result: bytes -> str
                    docnum_b64encode_bytes = base64.b64encode(image.read())
                    docnum_b64encode_string = docnum_b64encode_bytes.decode('utf-8')
                with open(IMGDIR + "/roi-DocType.jpg", "rb") as image:
                    doctype_b64encode_bytes = base64.b64encode(image.read())
                    doctype_b64encode_string = doctype_b64encode_bytes.decode('utf-8')
                if self.ocr_type == 'google':
                    response_data = {
                        "task_id": task_id,
                        "user_id": user_id,
                        "file_type": file_type,
                        "words_result": {
                            "InvoiceNum": docnumres,
                            "InvoiceCode": doctyperes,
                        },
                        "InvoiceNumEncode": docnum_b64encode_string,
                        "InvoiceCodeEncode": doctype_b64encode_string,
                    }
                elif self.ocr_type == 'baidu':
                    response_data = {
                        "task_id": task_id,
                        "user_id": user_id,
                        "file_type": file_type,
                        "InvoiceNumEncode": docnum_b64encode_string,
                        "InvoiceCodeEncode": doctype_b64encode_string,
                        "words_result": responseBaiduData,
                    }
                # store the parse result (encoded as a JSON string)
                with open(os.path.join(sdir, "response.json"), 'w') as outfile:
                    json_data = json.dumps(response_data)
                    outfile.write(json_data)
                response_packet = {
                    "msg": 'Access webpage success.',
                    "ret": HTTP_200_SUCCESS,
                    "data": response_data,
                }
                return make_response(
                    jsonify(response_packet),
                    HTTP_200_SUCCESS)  # <- the status_code displayed code on console
            else:
                raise ValueError("invalid user_id ,task_id or file_type",
                                 user_id, task_id, file_type)
    except ValueError as err:
        print(err.args)
        # BUGFIX: most ValueErrors raised above carry a single argument, so
        # the unguarded `err.args[1]` raised IndexError inside this handler;
        # check the length first.
        if len(err.args) > 1 and err.args[1] == HTTP_407_INTERNAL_ERROR:
            response_packet = {
                "msg": 'Server internal error.',
                "ret": HTTP_407_INTERNAL_ERROR,
                "data": {}
            }
            # NOTE(review): the 407 packet is returned with a 400 HTTP status
            # in the original — behavior preserved, confirm intent.
            return make_response(
                jsonify(response_packet),
                HTTP_400_BAD_REQUEST)  # <- the status_code displayed code on console
        response_packet = {
            "msg": 'bad request.',
            "ret": HTTP_400_BAD_REQUEST,
            "data": {}
        }
        return make_response(
            jsonify(response_packet),
            HTTP_400_BAD_REQUEST)  # <- the status_code displayed code on console
# NOTE(review): this chunk starts mid-function — `conn`, `root`, `ddl` and
# `fname` are defined before this excerpt; indentation reconstructed.
# Substitute the install root into the script path, then execute the SQL.
fname = fname.replace("#yenotroot#", root)
print(f"Load SQL script {fname}")
try:
    with conn.cursor() as c, open(fname, "r") as sqlfile:
        sql = sqlfile.read()
        c.execute(sql)
        conn.commit()
except Exception as e:
    # Wrap any failure with the offending script name for diagnosis.
    raise InitError(f"Error loading {ddl} -- {str(e)}")


if __name__ == "__main__":
    parse = argparse.ArgumentParser("initialize a yenot database")
    parse.add_argument(
        "dburl",
        help=
        "database identifier in url form (e.g. postgresql://user@host/dbname)",
    )
    parse.add_argument(
        "--db-reset",
        default=False,
        action="store_true",
        help="drop (if necessary) and recreate the database",
    )
    parse.add_argument(
        "--db-create",
        default=False,
        action="store_true",
        help="create the database (error if existing)",
    )
    parse.add_argument(
        # NOTE(review): chunk is truncated here, mid-call.
def get(self):
    """Fetch Baidu OCR results for (user_id, task_id), caching them on disk.

    Validates user_id/task_id against the DB, returns a cached
    response.json when present, otherwise queries the Baidu OCR API
    (with two retries under the 'tryout' policy) and persists the result.

    Returns:
        200 with the recognition data; 400 on invalid input, unknown
        ocr_type, or internal OCR failure.
    """
    try:
        parse = reqparse.RequestParser()
        parse.add_argument('task_id', type=str, required=True)
        parse.add_argument('user_id', type=int, required=True)
        args = parse.parse_args()
        task_id = args['task_id']
        user_id = args['user_id']
        # retrieve file type from task_id
        conn = MySQLdb.connect(self.db_host, self.db_user, self.db_passwd,
                               self.db_name)
        with conn:
            cursor = conn.cursor()
            cursor.execute("SELECT id from users where id = %s", (user_id, ))
            conn.commit()
            dataset = cursor.fetchall()
            if cursor.rowcount == 0:
                raise ValueError("invalid user_id:", user_id)
            cursor.execute(
                "SELECT file_type from tasks where task_id =%s ", (task_id, ))
            conn.commit()
            dataset = cursor.fetchall()
            # this task_id is not existed.
            if cursor.rowcount == 0:
                raise ValueError("task_id is not existed.")
            print('Total Row(s) fetched:', cursor.rowcount)
            for row in dataset:
                file_type = row[0]
                self.mFileType = file_type
            if task_id and user_id and file_type:
                # Attempt to retrieve info from the backend directory;
                # bypass the Baidu call entirely if a cached result exists.
                sdir = os.path.join(RESULT_FOLDER, task_id)
                response_file = os.path.join(sdir, 'response.json')
                if os.path.exists(sdir) and os.path.exists(response_file):
                    with open(response_file, 'r') as file:
                        json_string = json.load(file)
                    response_packet = {
                        "msg": 'Success.',
                        "ret": HTTP_200_SUCCESS,
                        "data": json_string,
                    }
                    return make_response(
                        jsonify(response_packet), HTTP_200_SUCCESS
                    )  # <- the status_code displayed code on console
                if self.ocr_type == 'baidu':
                    fba = FetchBaiduApi.FetchBaiduApi(self.db_host,
                                                      self.db_user,
                                                      self.db_passwd,
                                                      self.db_name)
                    responseBaiduData = fba.getInternal(
                        user_id, task_id, file_type)
                    if responseBaiduData == "":
                        if self.ocr_policy == 'tryout':
                            # Retry the Baidu lookup twice before giving up.
                            responseBaiduData = fba.getInternal(
                                user_id, task_id, file_type)
                            if responseBaiduData == "":
                                responseBaiduData = fba.getInternal(
                                    user_id, task_id, file_type)
                                if responseBaiduData == "":
                                    print("Internal error happened!")
                                    raise ValueError("Internal server error",
                                                     HTTP_407_INTERNAL_ERROR)
                    else:
                        print("response=>", responseBaiduData)
                    response_data = {
                        "task_id": task_id,
                        "user_id": user_id,
                        "file_type": file_type,
                        "words_result": responseBaiduData,
                    }
                    # store the parse result (encoded as a JSON string)
                    with open(os.path.join(sdir, "response.json"), 'w') as outfile:
                        json_data = json.dumps(response_data)
                        outfile.write(json_data)
                    response_packet = {
                        "msg": 'Access webpage success.',
                        "ret": HTTP_200_SUCCESS,
                        "data": response_data,
                    }
                    return make_response(
                        jsonify(response_packet), HTTP_200_SUCCESS
                    )  # <- the status_code displayed code on console
                else:
                    raise ValueError("invliad ocr_type setting.")
            else:
                raise ValueError("invalid input setting:", task_id, user_id,
                                 file_type)
    except ValueError as err:
        print(err.args)
        # BUGFIX: several ValueErrors raised above carry a single argument
        # (e.g. the unknown-ocr_type case), so the unguarded `err.args[1]`
        # raised IndexError inside this handler; check the length first.
        if len(err.args) > 1 and err.args[1] == HTTP_407_INTERNAL_ERROR:
            response_packet = {
                "msg": 'Server internal error.',
                "ret": HTTP_407_INTERNAL_ERROR,
                "data": {}
            }
            # NOTE(review): the 407 packet is returned with a 400 HTTP status
            # in the original — behavior preserved, confirm intent.
            return make_response(
                jsonify(response_packet), HTTP_400_BAD_REQUEST
            )  # <- the status_code displayed code on console
        response_packet = {
            "msg": 'bad request.',
            "ret": HTTP_400_BAD_REQUEST,
            "data": {}
        }
        return make_response(
            jsonify(response_packet), HTTP_400_BAD_REQUEST
        )  # <- the status_code displayed code on console