def main(option):
    if option == '--aes':
        banner.head()
        password = input('[*] Pick a password: ')
        text_to_aes = input('[*] Enter the text to be encrypted: ')
        encrypted_text = func.aes(f'{text_to_aes}', password)
        print('\n[*]', encrypted_text)
    elif option == '--caesar':
        banner.head()
        text_to_binary = input('[*] Enter the text to be encrypted: ')
        while True:
            try:
                s = int(input("[*] Enter the shift: "))  # number of positions to shift for the Caesar cipher
                break
            except ValueError:
                print('[!] Please enter a numerical value.')
                continue
        encrypt_text = func.caesar(text_to_binary, s)
        print('\n[*] Encrypted value:', "{}".format(encrypt_text))
    elif option == '--md5':
        func.md5()
    elif option == '--rot13':
        func.rot13()
    elif option == '--base64':
        func.base64()
    elif option == '--sha256':
        func.sha256()
    elif option == '--hex':
        func.hex()
    elif option == '--help' or option == '-h':
        banner.usage()
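# The dispatcher above delegates to helpers in a `func` module that is not shown here.
# As a rough illustration of what the '--caesar' branch relies on, here is a minimal
# sketch of a Caesar-shift helper. Only the name and call signature come from the call
# site above; the implementation below is an assumption, not the original code.
def caesar(text, shift):
    result = []
    for ch in text:
        if ch.isalpha():
            base = ord('A') if ch.isupper() else ord('a')
            # Rotate within the 26-letter alphabet, preserving case.
            result.append(chr((ord(ch) - base + shift) % 26 + base))
        else:
            result.append(ch)  # leave digits, spaces and punctuation untouched
    return ''.join(result)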
def urls_add(self):
    add_url_list = list(set(self.__request_json['urls_add']))  # de-duplicate

    # URLs already present in the queue collection
    exist_queue_url_list = []
    res = Mongo.get()['queue_' + self.project_name].find(
        {'url_md5': {'$in': [md5(l) for l in add_url_list]}}, {'url': 1})
    for doc in res:
        exist_queue_url_list.append(doc['url'])

    # URLs already present in the parsed collection
    exist_parsed_url_list = []
    res = Mongo.get()['parsed_' + self.project_name].find(
        {'url_md5': {'$in': [md5(l) for l in add_url_list]}}, {'url': 1})
    for doc in res:
        # todo: check how long ago the URL was parsed, so re-crawling can be allowed
        exist_parsed_url_list.append(doc['url'])

    # Enqueue the remaining URLs
    add_urls_data = []
    for url in add_url_list:
        if url not in exist_queue_url_list and url not in exist_parsed_url_list:
            # Only enqueue URLs that are in neither the queue nor the parsed collection
            add_urls_data.append({
                'domain': get_domain(url),
                'url': url,
                'url_md5': md5(url),
                'flag_time': 0,
                'add_time': int(time.time()),
                'slave_ip': self.__request_address[0],
            })
    add_urls_data and Mongo.get()['queue_' + self.project_name].insert(add_urls_data)
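# The crawler snippets in this section (urls_add, add_fails_record, urls_parsed,
# save_project) call free functions md5() and get_domain() that are defined elsewhere
# in that project. A minimal sketch of what they likely look like, assuming md5()
# hex-digests a string and get_domain() extracts the host from a URL; both behaviours
# are inferred from the call sites, not taken from the original source.
import hashlib
from urllib.parse import urlparse

def md5(text):
    # Hex digest of a unicode string; assumed behaviour of the project's md5() helper.
    return hashlib.md5(text.encode('utf-8')).hexdigest()

def get_domain(url):
    # Host part of a URL, e.g. 'https://example.com/a?b=1' -> 'example.com'.
    return urlparse(url).netloc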
def add_fails_record(self, ip, fails=()):
    """
    Based on failed URL fetch records: once failures for a domain reach a threshold
    within a time window, add the domain to that slave's (IP) deny list.
    """
    self.__init_key(ip)
    start_time = int(time.time()) - 30 * 60  # half an hour ago
    start_time_clean = int(time.time()) - 120 * 60  # two hours ago

    # Expire deny-list entries that were added before the cleanup window
    deny_domains = []
    deny_domains_temp = copy.deepcopy(self.slave_record[ip]['deny_domains'])  # deep copy
    for item in deny_domains_temp:
        deny_domains.append(item['domain'])
        if item['add_time'] < start_time_clean:
            # Added before the cleanup window: drop it from this slave's (IP) deny list
            self.slave_record[ip]['deny_domains'].remove(item)

    for item in fails:
        domain = item[0]
        http_code = int(item[1])
        add_time = item[2]

        if http_code == 403 and domain not in deny_domains:
            self.deny_urls_temp.setdefault(ip, {})
            res = self.deny_urls_temp[ip].setdefault(domain, {'count': 0, 'time': []})
            self.deny_urls_temp[ip][domain]['count'] += 1
            self.deny_urls_temp[ip][domain]['time'].append(add_time)

            # Enough 403s within the window: add the domain to the deny list
            if res['count'] == 10:
                # Count only the failures that happened within the last half hour
                time_count = 0
                for t in self.deny_urls_temp[ip][domain]['time']:
                    if t > start_time:
                        time_count += 1
                if time_count < 10:  # threshold not reached
                    continue
                # Add to the deny list and clear the temporary counters
                self.slave_record[ip]['deny_domains'].append({'domain': domain, 'add_time': int(time.time())})
                del self.deny_urls_temp[ip]
            continue

        # Handling for non-403 failures
        if http_code != 403:
            domain_md5 = md5(domain)  # MongoDB keys must not contain '.'
            self.slave_record[ip]['error_domains'].setdefault(domain_md5, {})
            self.slave_record[ip]['error_domains'][domain_md5].setdefault('domain', domain)
            self.slave_record[ip]['error_domains'][domain_md5].setdefault('add_time', int(time.time()))
            self.slave_record[ip]['error_domains'][domain_md5]['update_time'] = int(time.time())
            self.slave_record[ip]['error_domains'][domain_md5].setdefault('http_code', {})
            self.slave_record[ip]['error_domains'][domain_md5]['http_code'].setdefault(str(http_code), 0)
            self.slave_record[ip]['error_domains'][domain_md5]['http_code'][str(http_code)] += 1
            continue
def urls_parsed(self):
    urls_data = []
    url_list = []
    for url in self.__request_json['urls_parsed']:
        self.__slave_record.add_parsed_record(self.__request_address[0])
        url_list.append(url)
        urls_data.append({
            'url': url,
            'url_md5': md5(url),
            'add_time': int(time.time()),
            'slave_ip': self.__request_address[0],
        })

    # Remove the finished URLs from the queue
    Mongo.get()['queue_' + self.project_name].remove(
        {'url_md5': {'$in': [md5(l) for l in url_list]}}, multi=True)

    try:
        urls_data and Mongo.get()['parsed_' + self.project_name].insert(urls_data)
    except Exception:
        # Bulk insert failed (e.g. duplicate url_md5); fall back to inserting one by one
        try:
            for single_url in urls_data:
                single_url and Mongo.get()['parsed_' + self.project_name].insert_one(single_url)
        except Exception, error:
            print traceback.format_exc()
            print error
            print u'The following URL was crawled again and saved to parsed_* a second time:'
            print single_url, '\r\n\r\n'
def save_project():
    # Guard: saving is disabled in this deployment; everything below is kept but never runs
    return jsonify({'success': False, 'msg': 'Modification is forbidden!'})

    form_data = json.loads(request.data)  # todo: validate the form data
    name_r = re.compile(r'^[0-9a-zA-Z_-]+$')
    if not name_r.search(form_data['name']):
        return jsonify({'success': False, 'msg': 'The project name must match the regex: ^[0-9a-zA-Z_-]+$ '})
    exists_project = list(Mongo.get()['projects'].find({'name': form_data['name']}, {'_id': 1, 'add_time': 1}).limit(1))
    if 'edit' not in form_data and exists_project:
        return jsonify({'success': False, 'msg': 'The project name already exists!'})
    # Create a new project or update an existing one
    data = {
        'name': form_data['name'],
        'init_url': form_data['init_url'],
        'desc': form_data['desc'] if 'desc' in form_data else '',
        'code': form_data['code'],
        'static': 'Paused',
        'update_time': int(time.time()),
        'add_time': exists_project[0]['add_time'] if exists_project else int(time.time()),
    }
    Mongo.get()['projects'].update({'name': form_data['name']}, data, True)
    # Initialization when this is a new project
    if 'edit' not in form_data:
        Mongo.get()['queue_' + form_data['name']].insert(
            {
                'url': form_data['init_url'],
                'url_md5': md5(form_data['init_url']),
                'flag_time': 0,
                'add_time': int(time.time()),
                'slave_ip': '0.0.0.0'
            })
        # Creating an index on a collection that does not exist yet makes MongoDB
        # create the collection and apply the index automatically
        Mongo.get()['parsed_' + form_data['name']].ensure_index('url_md5', unique=True)
        Mongo.get()['queue_' + form_data['name']].ensure_index('url_md5', unique=True)
        # A new project was added: restart all slaves
        restart_slave_list = GlobalHelper.get('restart_slave_list') or []
        for slave_record in Mongo.get()['slave_record'].find():
            restart_slave_list.append(slave_record['ip'])
        GlobalHelper.set('restart_slave_list', list(set(restart_slave_list)))
    return jsonify({'success': True, 'msg': 'Saved successfully!'})
def save_project():
    form_data = json.loads(request.data)  # todo: validate the form data
    name_r = re.compile(r"^[0-9a-zA-Z_-]+$")
    if not name_r.search(form_data["name"]):
        return jsonify({"success": False, "msg": "The project name must match the regex: ^[0-9a-zA-Z_-]+$ "})
    exists_project = list(Mongo.get()["projects"].find({"name": form_data["name"]}, {"_id": 1, "add_time": 1}).limit(1))
    if "edit" not in form_data and exists_project:
        return jsonify({"success": False, "msg": "The project name already exists!"})

    # Create a new project or update an existing one
    data = {
        "name": form_data["name"],
        "init_url": form_data["init_url"],
        "desc": form_data["desc"] if "desc" in form_data else "",
        "code": form_data["code"],
        "static": "Paused",
        "update_time": int(time.time()),
        "add_time": exists_project[0]["add_time"] if exists_project else int(time.time()),
    }
    Mongo.get()["projects"].update({"name": form_data["name"]}, data, True)

    # Initialization when this is a new project
    if "edit" not in form_data:
        Mongo.get()["queue_" + form_data["name"]].insert(
            {
                "url": form_data["init_url"],
                "url_md5": md5(form_data["init_url"]),
                "flag_time": 0,
                "add_time": int(time.time()),
                "slave_ip": "0.0.0.0",
            }
        )
        # Creating an index on a collection that does not exist yet makes MongoDB
        # create the collection and apply the index automatically
        Mongo.get()["parsed_" + form_data["name"]].ensure_index("url_md5", unique=True)
        Mongo.get()["queue_" + form_data["name"]].ensure_index("url_md5", unique=True)
        # A new project was added: restart all slaves
        restart_slave_list = GlobalHelper.get("restart_slave_list") or []
        for slave_record in Mongo.get()["slave_record"].find():
            restart_slave_list.append(slave_record["ip"])
        GlobalHelper.set("restart_slave_list", list(set(restart_slave_list)))

    return jsonify({"success": True, "msg": "Saved successfully!"})
def get_mol_data(self, data=None):
    '''
    get_mol_data([data]) -- Returns a dict.

    Calculates the mol data.

    args:
        data: dict in which the data is stored.
    '''
    if data is None:
        data = {}  # avoid the mutable-default-argument pitfall
    desc = self.mol.calcdesc()
    ob_can_smi = self.gen_openbabel_can_smiles()
    data.update({
        'TPSA': desc['TPSA'],
        'LogP': desc['logP'],
        'MR': desc['MR'],
        'FORMULA': self.mol.formula,
        'MOLWEIGHT': self.mol.molwt,
        'EXACTMASS': self.mol.exactmass,
        'CHARGE': self.mol.charge,
        'OPENBABEL_CAN_SMILES': ob_can_smi,
        'MD5_OPENBABEL_CAN_SMILES': md5(ob_can_smi)
    })
    return data
def get_mol_data(self, data=None):
    '''
    get_mol_data([data]) -- Returns a dict.

    Calculates the mol data.

    args:
        data: dict in which the data is stored.
    '''
    if data is None:
        data = {}  # avoid the mutable-default-argument pitfall
    desc = self.mol.calcdesc()
    ob_can_smi = self.gen_openbabel_can_smiles()
    data.update(
        {
            'TPSA': desc['TPSA'],
            'LogP': desc['logP'],
            'MR': desc['MR'],
            'FORMULA': self.mol.formula,
            'MOLWEIGHT': self.mol.molwt,
            'EXACTMASS': self.mol.exactmass,
            'CHARGE': self.mol.charge,
            'OPENBABEL_CAN_SMILES': ob_can_smi,
            'MD5_OPENBABEL_CAN_SMILES': md5(ob_can_smi)
        }
    )
    return data
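# Hypothetical usage of get_mol_data(). The mymol('smi', <smiles>) constructor is
# inferred from the gen_search_sql_tuple() snippet further down, where the same wrapper
# is built from a SMILES string; it is an assumption about that class, not confirmed API.
mol = mymol('smi', 'CCO')  # ethanol
props = mol.get_mol_data()  # a fresh dict per call after the default-argument fix above
print(props['FORMULA'], props['MD5_OPENBABEL_CAN_SMILES'])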
def scanfile(api_key, url):
    # Prompt user for a file path
    file_path = input("Enter full file path (Use forward slashes for Windows): ")

    # Parse input and perform error checking
    while True:
        try:
            md5sum = md5(file_path)
        except Exception:
            print("There was an error, please check your input")
            file_path = input("Enter full file path (Use forward slashes for Windows): ")
        else:
            break

    filename = file_name(file_path)

    # Build the JSON request body
    data = {
        "request": [{
            "md5": md5sum,
            "features": ["te", "av"],
            "te": {
                "reports": ["xml", "pdf"]
            }
        }]
    }

    # Assemble the header
    headers = {
        "User-Agent": "python-api-wrapper",
        "Accept": "*/*",
        "Content-Type": "application/json",
        "Content-Length": str(len(data)),
        "Authorization": api_key
    }

    # Send the request and parse the reply
    try:
        # Define global variable
        global status_code
        status_code = None  # reset so a failed query cannot reuse a stale code
        print("\n")
        print("Scanning Now...")
        response = requests.post(url + "query", json=data, headers=headers, verify=False)
        response_json = json.loads(response.content)
        status_code = response_json['response'][0]['status']['code']
        print("\n")
        print("Message: " + response_json['response'][0]['status']['message'])
        print("\n")
        print("Threat Emulation Results:")
        print("Verdict: " + response_json['response'][0]['te']['images'][0]['report']['verdict'])
        print("Status: " + response_json['response'][0]['te']['images'][0]['status'])
        print("\n")
        print("Anti-Virus Results:")
        print("Message: " + response_json['response'][0]['av']['status']['message'])
        av_label = response_json['response'][0]['av']['status']['label']
        print("Label: " + av_label)
        if av_label == "FOUND":
            print("Signature Name: " + response_json['response'][0]['av']['malware_info']['signature_name'])
            print("Severity: " + str(response_json['response'][0]['av']['malware_info']['severity']))
            print("Confidence: " + str(response_json['response'][0]['av']['malware_info']['confidence']))
            print("\n")
    except Exception:
        print("Query Failed. Please try again.")

    # Check whether the user wants to upload the file for emulation
    if status_code == 1004:
        while True:
            print("Would you like to upload your file for emulation?")
            print("""
    1. Upload file for emulation
    2. Do not upload file
            """)
            selection = input("Select a task number: ")
            if selection == "1":
                print("\nUploading File...")
                upload(api_key, url, file_path, md5sum, filename)
                return
            elif selection == "2":
                print("\n")
                return_to_menu()
                return
            else:
                print("\n Not Valid Choice Try again")

    print("\n")
    return_to_menu()
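# scanfile() above and scan_directory() below pass a file path to md5() and expect it to
# raise when the file cannot be read. Unlike the string-hashing md5() helper sketched for
# the crawler snippets earlier, this one hashes file contents. A minimal sketch under that
# assumption; the chunked reading is illustrative, not the original implementation.
import hashlib

def md5(file_path, chunk_size=65536):
    # Hash the file contents in chunks so large files never need to fit in memory.
    digest = hashlib.md5()
    with open(file_path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()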
def add_fails_record(self, ip, fails=()):
    """
    Based on failed URL fetch records: once failures for a domain reach a threshold
    within a time window, add the domain to that slave's (IP) deny list.
    """
    self.__init_key(ip)
    start_time = int(time.time()) - 30 * 60  # half an hour ago
    start_time_clean = int(time.time()) - 120 * 60  # two hours ago

    # Expire deny-list entries that were added before the cleanup window
    deny_domains = []
    deny_domains_temp = copy.deepcopy(
        self.slave_record[ip]['deny_domains'])  # deep copy
    for item in deny_domains_temp:
        deny_domains.append(item['domain'])
        if item['add_time'] < start_time_clean:
            # Added before the cleanup window: drop it from this slave's (IP) deny list
            self.slave_record[ip]['deny_domains'].remove(item)

    for item in fails:
        domain = item[0]
        http_code = int(item[1])
        add_time = item[2]

        if http_code == 403 and domain not in deny_domains:
            self.deny_urls_temp.setdefault(ip, {})
            res = self.deny_urls_temp[ip].setdefault(
                domain, {
                    'count': 0,
                    'time': []
                })
            self.deny_urls_temp[ip][domain]['count'] += 1
            self.deny_urls_temp[ip][domain]['time'].append(add_time)

            # Enough 403s within the window: add the domain to the deny list
            if res['count'] == 10:
                # Count only the failures that happened within the last half hour
                time_count = 0
                for t in self.deny_urls_temp[ip][domain]['time']:
                    if t > start_time:
                        time_count += 1
                if time_count < 10:  # threshold not reached
                    continue
                # Add to the deny list and clear the temporary counters
                self.slave_record[ip]['deny_domains'].append({
                    'domain': domain,
                    'add_time': int(time.time())
                })
                del self.deny_urls_temp[ip]
            continue

        # Handling for non-403 failures
        if http_code != 403:
            domain_md5 = md5(domain)  # MongoDB keys must not contain '.'
            self.slave_record[ip]['error_domains'].setdefault(
                domain_md5, {})
            self.slave_record[ip]['error_domains'][domain_md5].setdefault(
                'domain', domain)
            self.slave_record[ip]['error_domains'][domain_md5].setdefault(
                'add_time', int(time.time()))
            self.slave_record[ip]['error_domains'][domain_md5][
                'update_time'] = int(time.time())
            self.slave_record[ip]['error_domains'][domain_md5].setdefault(
                'http_code', {})
            self.slave_record[ip]['error_domains'][domain_md5][
                'http_code'].setdefault(str(http_code), 0)
            self.slave_record[ip]['error_domains'][domain_md5][
                'http_code'][str(http_code)] += 1
            continue
def scan_directory(api_key, url, file_path, report_path, rname):
    # Open the report file for appending
    f = open(report_path + rname, "a")

    # Parse input and perform error checking
    try:
        md5sum = md5(file_path)
    except Exception:
        # Bail out rather than retrying: file_path never changes inside this function
        f.write(file_path + " does not exist or has a problem with it.\n")
        f.close()
        return

    filename = file_name(file_path)

    # Build the JSON request body
    data = {
        "request": [{
            "md5": md5sum,
            "features": ["te", "av"],
            "te": {
                "reports": ["xml", "pdf"]
            }
        }]
    }

    # Assemble the header
    headers = {
        "User-Agent": "python-api-wrapper",
        "Accept": "*/*",
        "Content-Type": "application/json",
        "Content-Length": str(len(data)),
        "Authorization": api_key
    }

    # Send the request and parse the reply
    try:
        # Define global variable
        global status_code
        status_code = None  # reset so a failed query cannot reuse a stale code
        print("\n")
        print("Scanning " + file_path)
        response = requests.post(url + "query", json=data, headers=headers, verify=False)
        response_json = json.loads(response.content)
        status_code = response_json['response'][0]['status']['code']
        av_label = response_json['response'][0]['av']['status']['label']
        f.write("Results for " + file_path + "\n")
        f.write('Message: ' + response_json['response'][0]['status']['message'] + '\n')
        f.write('Threat Emulation Results: \n\n')
        f.write('Verdict: ' + response_json['response'][0]['te']['images'][0]['report']['verdict'] + '\n')
        f.write('Status: ' + response_json['response'][0]['te']['images'][0]['status'] + '\n')
        f.write('Anti-Virus Results: \n\n')
        f.write('Message: ' + response_json['response'][0]['av']['status']['message'] + '\n')
        f.write('Label: ' + av_label + '\n')
        if av_label == "FOUND":
            f.write('Signature Name: ' + response_json['response'][0]['av']['malware_info']['signature_name'] + '\n')
            f.write('Severity: ' + str(response_json['response'][0]['av']['malware_info']['severity']) + '\n')
            f.write('Confidence: ' + str(response_json['response'][0]['av']['malware_info']['confidence']) + '\n')
        f.write("Info to check results: " + file_path + " " + md5sum + '\n\n\n')
    except Exception:
        f.write(file_path + ' encountered an error. Results not available.\n')
        f.write('\n')
        f.write('\n')

    # Check whether the file needs to be uploaded for emulation
    if status_code == 1004:
        # Retrieve the extension
        file_ext = file_extension(filename)

        # Build the upload request body
        data = {
            "request": {
                "md5": md5sum,
                "file_name": filename,
                "file_type": file_ext,
                "features": ["te"],
                "te": {
                    "reports": ["pdf", "xml"],
                    "images": [{
                        "id": "7e6fe36e-889e-4c25-8704-56378f0830df",
                        "revision": 1
                    }, {
                        "id": "e50e99f3-5963-4573-af9e-e3f4750b55e2",
                        "revision": 1
                    }]
                }
            }
        }

        files = {
            'json': (None, json.dumps(data), 'application/json'),
            'file': (filename, open(file_path, 'rb'), 'application/octet-stream')
        }

        # Assemble the header
        headers = {"Authorization": api_key}

        # Send the upload request
        try:
            response = requests.post(url + "upload", headers=headers, files=files)
            response_json = json.loads(response.content)
            f.write('Upload_Status: ' + response_json['response']['status']['message'] + '\n')
        except Exception:
            f.write('There was a problem when uploading the file')

    f.close()
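# scanfile() and scan_directory() also call file_name() and file_extension() helpers that
# are not shown in this section. A minimal sketch under the assumption that they simply
# split the path with os.path; the names come from the call sites, the bodies are guesses.
import os

def file_name(file_path):
    # 'C:/samples/report.pdf' -> 'report.pdf'
    return os.path.basename(file_path)

def file_extension(filename):
    # 'report.pdf' -> 'pdf'
    return os.path.splitext(filename)[1].lstrip('.')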
def gen_search_sql_tuple(self, smiles, search_type="1", out_put_type=1):
    '''
    gen_search_sql_tuple(smiles[, search_type[, out_put_type]]) -- Returns a tuple.

    args:
        smiles: str, SMILES string that represents a molecule.
        search_type: str. values:
            '1': full match,
            '2': target structure contains the substructure,
            '3': target structure doesn't contain the substructure,
            '4': target structure contains the superstructure,
            '5': target structure doesn't contain the superstructure
        out_put_type: int. values:
            1 (or anything but 2): without the mol_stat part.
            2: contains the mol_stat part

    Generates the search SQL from the SMILES string.
    Returns a tuple with two elements:
        0: stat, some statistics of the mol such as number of C, number of rings, etc.
        1: the SQL part; the SQL is not fully generated, only a fragment like
           " FROM ... WHERE ... "
    '''
    fp_sql = ""
    stat = {}
    head_flag = '('
    end_flag = ')'
    stat_part = ''
    md5_ob_can_smi_key = self.env['MD5_OPENBABEL_CAN_SMI_KEY']
    fp_bits_key = self.env['FP_BITS_KEY']
    sign_mol_stat = ' >= '
    try:
        num_H_key = self.env['NUM_H_KEY']
    except KeyError:
        num_H_key = 'NUM_H'
    bool_op = " AND "
    mol_fp = {}
    mol = mymol('smi', smiles)
    mol.get_fps(mol_fp)

    if search_type == "1":
        # Full match: compare the md5 of the canonical SMILES directly
        if md5_ob_can_smi_key in self.recorded_fields_dict.keys():
            return ({}, self.recorded_fields_dict[md5_ob_can_smi_key] + " = '" +
                    md5(mymol('smi', smiles).gen_openbabel_can_smiles()) + "'")
        else:
            return ({}, '')
    elif search_type in ("2", "4"):
        mol_stat = {}
        tmp_stat = {}
        mol.get_mol_stat(mol_stat)
        if mol_stat.has_key(num_H_key):
            mol_stat.pop(num_H_key)
        tmp_stat[str(fp_bits_key)] = mol_fp[fp_bits_key]
        tmp_stat.update(mol_stat)
        if search_type == '2':
            sign_mol_stat = ' >= '
            stat['sub'] = tmp_stat
        elif search_type == '4':
            sign_mol_stat = ' <= '
            stat['sup'] = tmp_stat
        bool_op = ' AND '
    elif search_type in ("3", "5"):
        bool_op = ' AND '
        head_flag = ' NOT ('

    fps_keys = mol_fp.items()
    fps_keys.sort()
    for i, j in fps_keys:
        if self.recorded_fields_dict.has_key(i) and i != fp_bits_key and j != 0:
            if search_type in ('2', '3'):
                fp_sql += self.recorded_fields_dict[i] + " & " + str(j) + ' = ' + str(j) + bool_op
            elif search_type in ('4', '5'):
                field = self.recorded_fields_dict[i]
                fp_sql += field + " & " + str(j) + ' = ' + field + bool_op

    if out_put_type == 2 and search_type in ('2', '4'):
        val_k = []
        for k, v in tmp_stat.items():
            for j in self.def_dict.values():
                if j.has_key(k):
                    val_k = j[k][0]
                    break
            if val_k:
                stat_part += str(val_k) + sign_mol_stat + str(v) + ' AND '
        if stat_part:
            head_flag = '(' + stat_part.rstrip('AND ') + ') AND ('

    if fp_sql.rstrip(bool_op):
        return (stat, head_flag + fp_sql.rstrip(bool_op) + end_flag)
    else:
        return ({}, '1')
def save_project():
    form_data = json.loads(request.data)  # todo: validate the form data
    name_r = re.compile(r'^[0-9a-zA-Z_-]+$')
    if not name_r.search(form_data['name']):
        return jsonify({
            'success': False,
            'msg': 'The project name must match the regex: ^[0-9a-zA-Z_-]+$ '
        })
    exists_project = list(Mongo.get()['projects'].find(
        {
            'name': form_data['name']
        }, {
            '_id': 1,
            'add_time': 1
        }).limit(1))
    if 'edit' not in form_data and exists_project:
        return jsonify({'success': False, 'msg': 'The project name already exists!'})

    # Create a new project or update an existing one
    data = {
        'name': form_data['name'],
        'init_url': form_data['init_url'],
        'desc': form_data['desc'] if 'desc' in form_data else '',
        'code': form_data['code'],
        'static': 'Paused',
        'update_time': int(time.time()),
        'add_time': exists_project[0]['add_time'] if exists_project else int(time.time()),
    }
    Mongo.get()['projects'].update({'name': form_data['name']}, data, True)

    # Initialization when this is a new project
    if 'edit' not in form_data:
        Mongo.get()['queue_' + form_data['name']].insert({
            'url': form_data['init_url'],
            'url_md5': md5(form_data['init_url']),
            'flag_time': 0,
            'add_time': int(time.time()),
            'slave_ip': '0.0.0.0'
        })
        # Creating an index on a collection that does not exist yet makes MongoDB
        # create the collection and apply the index automatically
        Mongo.get()['parsed_' + form_data['name']].ensure_index('url_md5', unique=True)
        Mongo.get()['queue_' + form_data['name']].ensure_index('url_md5', unique=True)
        # A new project was added: restart all slaves
        restart_slave_list = GlobalHelper.get('restart_slave_list') or []
        for slave_record in Mongo.get()['slave_record'].find():
            restart_slave_list.append(slave_record['ip'])
        GlobalHelper.set('restart_slave_list', list(set(restart_slave_list)))

    return jsonify({'success': True, 'msg': 'Saved successfully!'})
def gen_search_sql_tuple(self, smiles, search_type="1", out_put_type=1):
    """
    gen_search_sql_tuple(smiles[, search_type[, out_put_type]]) -- Returns a tuple.

    args:
        smiles: str, SMILES string that represents a molecule.
        search_type: str. values:
            '1': full match,
            '2': target structure contains the substructure,
            '3': target structure doesn't contain the substructure,
            '4': target structure contains the superstructure,
            '5': target structure doesn't contain the superstructure
        out_put_type: int. values:
            1 (or anything but 2): without the mol_stat part.
            2: contains the mol_stat part

    Generates the search SQL from the SMILES string.
    Returns a tuple with two elements:
        0: stat, some statistics of the mol such as number of C, number of rings, etc.
        1: the SQL part; the SQL is not fully generated, only a fragment like
           " FROM ... WHERE ... "
    """
    fp_sql = ""
    stat = {}
    head_flag = "("
    end_flag = ")"
    stat_part = ""
    md5_ob_can_smi_key = self.env["MD5_OPENBABEL_CAN_SMI_KEY"]
    fp_bits_key = self.env["FP_BITS_KEY"]
    sign_mol_stat = " >= "
    try:
        num_H_key = self.env["NUM_H_KEY"]
    except KeyError:
        num_H_key = "NUM_H"
    bool_op = " AND "
    mol_fp = {}
    mol = mymol("smi", smiles)
    mol.get_fps(mol_fp)

    if search_type == "1":
        # Full match: compare the md5 of the canonical SMILES directly
        if md5_ob_can_smi_key in self.recorded_fields_dict.keys():
            return (
                {},
                self.recorded_fields_dict[md5_ob_can_smi_key]
                + " = '"
                + md5(mymol("smi", smiles).gen_openbabel_can_smiles())
                + "'",
            )
        else:
            return ({}, "")
    elif search_type in ("2", "4"):
        mol_stat = {}
        tmp_stat = {}
        mol.get_mol_stat(mol_stat)
        if mol_stat.has_key(num_H_key):
            mol_stat.pop(num_H_key)
        tmp_stat[str(fp_bits_key)] = mol_fp[fp_bits_key]
        tmp_stat.update(mol_stat)
        if search_type == "2":
            sign_mol_stat = " >= "
            stat["sub"] = tmp_stat
        elif search_type == "4":
            sign_mol_stat = " <= "
            stat["sup"] = tmp_stat
        bool_op = " AND "
    elif search_type in ("3", "5"):
        bool_op = " AND "
        head_flag = " NOT ("

    fps_keys = mol_fp.items()
    fps_keys.sort()
    for i, j in fps_keys:
        if self.recorded_fields_dict.has_key(i) and i != fp_bits_key and j != 0:
            if search_type in ("2", "3"):
                fp_sql += self.recorded_fields_dict[i] + " & " + str(j) + " = " + str(j) + bool_op
            elif search_type in ("4", "5"):
                field = self.recorded_fields_dict[i]
                fp_sql += field + " & " + str(j) + " = " + field + bool_op

    if out_put_type == 2 and search_type in ("2", "4"):
        val_k = []
        for k, v in tmp_stat.items():
            for j in self.def_dict.values():
                if j.has_key(k):
                    val_k = j[k][0]
                    break
            if val_k:
                stat_part += str(val_k) + sign_mol_stat + str(v) + " AND "
        if stat_part:
            head_flag = "(" + stat_part.rstrip("AND ") + ") AND ("

    if fp_sql.rstrip(bool_op):
        return (stat, head_flag + fp_sql.rstrip(bool_op) + end_flag)
    else:
        return ({}, "1")
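# Hypothetical caller of gen_search_sql_tuple(), only to show how the returned pair might
# be consumed. The `searcher` instance, the `molecules` table and the surrounding SELECT
# are invented for illustration; the original code does not show how the fragment is
# stitched into a full statement.
stat, where_fragment = searcher.gen_search_sql_tuple("c1ccccc1", search_type="2", out_put_type=2)
if where_fragment:
    sql = "SELECT * FROM molecules WHERE " + where_fragment
    print(stat)  # per-element counts used for the substructure pre-filter
    print(sql)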