def add_item(id):
    verify_args = {
        'password': fields.Str(validate=password_length_validator, required=True,
                               use=lambda val: val.lower()),
        'username': fields.Str(required=True)
    }
    args = parser.parse(verify_args, request)
    if verify_user(args['username'], args['password']) is False:
        return make_response(jsonify({'success': False, 'error': 'Password incorrect'}), 401)
    username = args['username']

    request_args = {
        'name': fields.Str(required=True),
        'number': fields.Int(required=True)
    }
    args = parser.parse(request_args, request)

    item = Item()
    # verify inventory
    inventory = get_inventory_from_db(id)
    if inventory is None:
        return response_not_found(Inventory.__name__, id)
    if verify_user_access(username, inventory):
        item.inventory_id = inventory.id
        item.name = args['name']
        item.number = args['number']
        if db_add(item) is True:
            return response_success(ItemSchema(item).data)
        return make_response(jsonify({'success': False, 'data': 'Item {} not added'.format(item.name)}))
    return make_response(jsonify(
        {'success': False,
         'error': 'User {} not authorized to access Inventory id:{}'.format(username, id)}))

def execute_test(test_name, conn):
    """
    Carry out the execution of a test. This involves setting up a fresh
    database to interact with and parsing the target file.

    Args:
        test_name (string): name of the .ged file in the ged directory which
            we are going to test
        conn (connection): an SQLite connection object which represents the
            database we plan on writing information to

    Returns:
        None
    """
    reset_db(conn)
    parse('./ged/' + test_name, conn)

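# Minimal usage sketch for execute_test (an assumption, not part of the original
# test suite): run one file against a throwaway in-memory SQLite database.
# The file name 'sample.ged' is hypothetical.
import sqlite3

conn = sqlite3.connect(':memory:')
execute_test('sample.ged', conn)
conn.close()
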
def __init__(self):
    self.base_url = sys.argv[-1]
    self.domain = urlparse.urlparse(sys.argv[-1]).netloc.replace(':', '_')
    if not os.path.exists(self.domain):
        os.mkdir(self.domain)
    print '[+] Download and parse index file ...'
    data = self._request_data(sys.argv[-1] + '/index')
    with open('index', 'wb') as f:
        f.write(data)
    self.queue = Queue.Queue()
    blacklist_extensions = (".jpg", ".gif", ".css", ".jpeg", ".png",
                            ".ttf", ".woff", ".eot", ".otf")
    for entry in parse('index'):
        if "sha1" in entry.keys():
            if not entry["name"].encode('utf-8').strip().lower().endswith(blacklist_extensions):
                self.queue.put((entry["sha1"].strip(), entry["name"].strip()))
                try:
                    print entry['name']
                except:
                    pass
    self.lock = threading.Lock()
    self.thread_count = 20
    self.STOP_ME = False

def __init__(self):
    self.base_url = sys.argv[-1]
    self.domain = urlparse.urlparse(sys.argv[-1]).netloc.replace(':', '_')
    if not os.path.exists(self.domain):
        os.mkdir(self.domain)
    print '[+] Download and parse index file ...'
    # proxy_handle = urllib2.ProxyHandler({"http": '218.78.210.190:8080'})
    # opener = urllib2.build_opener(proxy_handle)
    # urllib2.install_opener(opener)
    data = urllib2.urlopen(sys.argv[-1] + '/index').read()
    with open('index', 'wb') as f:
        f.write(data)
    self.queue = Queue.Queue()
    for entry in parse('index'):
        if "sha1" in entry.keys():
            self.queue.put((entry["sha1"].strip(), entry["name"].strip()))
    self.lock = threading.Lock()
    self.thread_count = 20
    self.STOP_ME = False
    self.proxys = []
    try:
        with open('proxy_ok.txt', 'r') as fd_proxy:
            for line in fd_proxy:
                if line and line.strip():
                    if line.strip().endswith(":443"):
                        proxy = {line.strip(): 'https'}
                    else:
                        proxy = {line.strip(): 'http'}
                    print 'proxy', proxy
                    self.proxys.append(proxy)
    except Exception as e:
        print 'error', e

def ssn(content):
    _list_ = parse(content).getssn()
    # note: the original checked `!= None or != []`, which is always true
    if _list_ is not None and _list_ != []:
        if len(_list_) >= 2:
            plus('US Social Security Number disclosure: %s' % (str(_list_).split('[')[1].split(']')[0]))
        elif len(_list_) == 1:
            plus('US Social Security Number disclosure: %s' % _list_[0])

def privateip(content):
    _list_ = parse(content).getip()
    if _list_ is not None and _list_ != []:
        if len(_list_) >= 2:
            plus('Private IP address disclosure: %s' % (str(_list_).split('[')[1].split(']')[0]))
        elif len(_list_) == 1:
            plus('Private IP address disclosure: %s' % _list_[0])

def __init__(self):
    self.base_url = sys.argv[-1]
    self.domain = urlparse.urlparse(sys.argv[-1]).netloc.replace(':', '_')
    print('[+] Download and parse index file ...')
    try:
        data = self._request_data(sys.argv[-1] + '/index')
    except Exception as e:
        print('[ERROR] index file download failed: %s' % str(e))
        exit(-1)
    with open('index', 'wb') as f:
        f.write(data)
    if not os.path.exists(self.domain):
        os.mkdir(self.domain)
    self.dest_dir = os.path.abspath(self.domain)
    self.queue = Queue.Queue()
    for entry in parse('index'):
        if "sha1" in entry.keys():
            entry_name = entry["name"].strip()
            if self.is_valid_name(entry_name):
                self.queue.put((entry["sha1"].strip(), entry_name))
                try:
                    print('[+] %s' % entry['name'])
                except Exception as e:
                    pass
    self.lock = threading.Lock()
    self.thread_count = 10
    self.STOP_ME = False

def emails(content):
    _list_ = parse(content).getmail()
    if _list_ is not None and _list_ != []:
        if len(_list_) >= 2:
            plus('Email address disclosure: %s' % (str(_list_).split('[')[1].split(']')[0]))
        elif len(_list_) == 1:
            plus('Email address disclosure: %s' % _list_[0])

def generate_share_code(id):
    verify_args = {
        'password': fields.Str(validate=password_length_validator, required=True,
                               use=lambda val: val.lower()),
        'username': fields.Str(required=True)
    }
    args = parser.parse(verify_args, request)
    if verify_user(args['username'], args['password']) is False:
        return make_response(jsonify({'success': False, 'error': 'Password incorrect'}), 401)

    inventory = db.session.query(Inventory).get(id)
    if inventory is None:
        return response_not_found(Inventory.__name__, id)
    if verify_user_access(args['username'], inventory):
        code = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(8))
        inventory.share_code = code
        if db_add(inventory):
            return response_success(InventorySchema(inventory, only=('id', 'share_code')).data)
        return make_response(
            jsonify({'success': False, 'error': 'Share code not generated for Inventory id:{}'.format(id)}))
    return make_response(jsonify(
        {'success': False,
         'error': 'User {} not authorized to access Inventory id:{}'.format(args['username'], id)}))

def creditcards(content):
    _list_ = parse(content).getcc()
    if _list_ is not None and _list_ != []:
        if len(_list_) >= 2:
            plus('Credit card number disclosure: %s' % (str(_list_).split('[')[1].split(']')[0]))
        elif len(_list_) == 1:
            plus('Credit card number disclosure: %s' % _list_[0])

def main():
    geneSymbols, toJSON = parse()
    geneIDs = toID(geneSymbols)
    symbolMap = {symbol: gID for symbol, gID in zip(geneSymbols, geneIDs)}
    # TODO: this fails silently for unrecognized gene symbols
    samples = retrieve(symbolMap)
    outputDict = summarize(samples, symbolMap)
    formatAndPrint(outputDict, toJSON=toJSON)

def scrape_mep_declaration(id):
    doc = get_html_page(mep_profile_url(id))
    url = declarations_url(doc)
    if not url:
        return None
    pdf = download_declaration(id, url)
    data = parse(pdf)
    return data

def upload():
    """
    Handles file uploading.

    :return: Template
    """
    if request.method == 'POST':
        creator = request.form['creator']
        f = request.files['file']
        _format = "%Y-%m-%d %H:%M:%S"
        now = datetime.datetime.utcnow().strftime(_format)
        filename = secure_filename(now + '_' + str(creator) + '_' + f.filename)
        f.save('files/' + filename)
        parse(creator, filename)
        return redirect(url_for('page.downloads'))
    return render_template('page/upload.html')

def edit_inventory(id):
    verify_args = {
        'password': fields.Str(validate=password_length_validator, required=True,
                               use=lambda val: val.lower()),
        'username': fields.Str(required=True)
    }
    args = parser.parse(verify_args, request)
    username = args['username']
    if verify_user(username, args['password']) is False:
        return make_response(jsonify({'success': False, 'error': 'Password incorrect'}), 401)

    request_args = {
        'name': fields.Str(allow_missing=True),
        'description': fields.Str(allow_missing=True)
    }
    args = parser.parse(request_args, request)
    print args
    if len(args) == 0:
        return make_response(jsonify({'success': False, 'data': 'No changes provided'}))

    inventory = get_inventory_from_db(id)
    if inventory is None:
        return response_not_found(Inventory.__name__, id)
    if verify_user_access(username, inventory):
        if 'name' in args:
            inventory.name = args['name']
        if 'description' in args:
            inventory.description = args['description']
        inventory.updated = datetime.datetime.utcnow()
        if db_add(inventory) is True:
            return response_success(inventory_schema_simple(inventory))
        return make_response(
            jsonify({'success': False, 'data': 'Inventory {} was not updated'.format(inventory.name)}))
    return make_response(jsonify(
        {'success': False,
         'error': 'User {} not authorized to access Inventory id:{}'.format(username, id)}))

def __init__(self):
    self.base_url = sys.argv[-1]
    self.domain = urlparse.urlparse(sys.argv[-1]).netloc.replace(':', '_')
    if not os.path.exists(self.domain):
        os.mkdir(self.domain)
    print '[+] Download and parse index file ...'
    data = urllib2.urlopen(sys.argv[-1] + '/index').read()
    with open('index', 'wb') as f:
        f.write(data)
    self.queue = Queue.Queue()
    for entry in parse('index'):
        if "sha1" in entry.keys():
            self.queue.put((entry["sha1"].strip(), entry["name"].strip()))
    self.lock = threading.Lock()

def Git_Downloader(self):
    print("[test] ./SourceDownloader/" + self.domain + "/index")
    for entry in parse("./SourceDownloader/" + self.domain + "/index"):
        print("[+] index entry keys:", entry.keys())
        if "sha1" in entry.keys():
            print("[+++] index entry has both required keys: name and sha1")
            print(entry['name'].strip())
            file_dir = entry['name'].strip()
            # .git stores objects under a folder named after the first two characters
            # of the sha1, with the remaining characters as the file name, e.g. /7c/6a5b54ad7998
            front_two = re.search("..", entry["sha1"]).group()  # first two characters of the sha1
            sha1_folder = re.sub(front_two, "", entry["sha1"], count=1)  # sha1 with its first two characters removed
            print(front_two)
            print(entry["sha1"])
            data = requests.get(self.url + "/.git/objects/" + front_two + "/" + sha1_folder).content
            try:
                data = zlib.decompress(data)  # raw git objects are zlib-compressed
            except:
                pass
            try:
                data = data.decode()  # decode to str so the header can be stripped
                # strip the leading 'blob <size>\0' header, then re-encode to bytes so the
                # file write does not fail (Python 3 str/bytes issue)
                data = re.sub('blob \d+\00', '', data).encode()
            except:
                # fallback for content that is not valid UTF-8 (e.g. images):
                # decode leniently, then re-encode before writing
                data = data.decode("utf8", "ignore")
                print(data)
                print("StripeD:::" + data)
                data = data.encode()
            target_dir = "./SourceDownloader/" + self.domain + "/" + os.path.dirname(file_dir)
            if target_dir and not os.path.exists(target_dir):
                os.makedirs(target_dir)
            file = open("./SourceDownloader/" + self.domain + "/" + file_dir, 'wb+')
            file.write(data)
            file.close()
            print('[OK] ' + file_dir)
        else:
            print("[---] index entry is missing the required keys: name and sha1")

def make_inventory():
    verify_args = {
        'password': fields.Str(validate=password_length_validator, required=True,
                               use=lambda val: val.lower()),
        'username': fields.Str(required=True)
    }
    args = parser.parse(verify_args, request)
    print args
    user = verify_user(args['username'], args['password'])
    if user is False:
        return make_response(jsonify({'success': False, 'error': 'Password incorrect'}), 401)
    username = args['username']

    request_args = {
        'name': fields.Str(required=True),
        'description': fields.Str()
    }
    args = parser.parse(request_args, request)

    inventory = Inventory()
    inventory.name = args['name']
    inventory.description = args['description']
    inventory.updated = datetime.datetime.utcnow()
    user.inventories.append(inventory)
    if db_add(inventory) is True:
        if db_add(user) is True:
            return response_success(inventory_schema_simple(inventory), 201)
        db_delete(inventory)
        return make_response(
            jsonify({'success': False, 'error': 'Inventory was not added to user {}'.format(username)}))
    return make_response(jsonify({'success': False, 'error': 'Inventory {} was not created'.format(args['name'])}), 400)

def __init__(self):
    self.base_url = sys.argv[-1]
    self.domain = urlparse.urlparse(sys.argv[-1]).netloc.replace(':', '_')
    if not os.path.exists(self.domain):
        os.mkdir(self.domain)
    print '[+] Download and parse index file ...'
    data = urllib2.urlopen(sys.argv[-1] + '/index').read()
    with open('index', 'wb') as f:
        f.write(data)
    self.queue = Queue.Queue()
    for entry in parse('index'):
        if "sha1" in entry.keys():
            self.queue.put((entry["sha1"].strip(), entry["name"].strip()))
    self.lock = threading.Lock()
    self.thread_count = 20
    self.STOP_ME = False

def login():
    if current_user.is_authenticated():
        return redirect('/')
    if request.method == 'POST':
        print 'post'
        args = {'username': fields.Str(required=True),
                'password': fields.Str(required=True)}
        args = parser.parse(args, request)
        print args
        if validate_user(args['username'], args['password']):
            login_user(User.get(args['username']))
            return redirect('/')
        else:
            return render_template('login.html', error='Login failed')
    if request.method == 'GET':
        return render_template('login.html')

def createModel():
    global vertic_picture, norm_picture, vertic_picture_haut, norm_picture_haut, vertic_picture_bas, norm_picture_bas
    global vertic_target, norm_target, color_target
    global amplitudeInit, thetaCible, rayonCible, nbCibles, env_haut_bas, amplitudeCible, hauteur
    if test:
        # parse the input file; the 2nd parameter inverts the vertices
        vertic_picture, norm_picture = parser.parse(nameFile, reverse)
        cameraZ = 10
    elif expe:
        # build the model
        amplitudeCible = 6
        amplitudeInit = libExpe.radius_TargetToInit(amplitudeCible)
        rayonCible = 1
        nbCibles = 9
        env_haut_bas = 0
        hauteur = 10
        nbAnneaux = 2
        vertic_picture, norm_picture, cameraZ, vertic_target, norm_target = draw.drawExpe(
            amplitudeInit, rayonCible, hauteur, nbAnneaux, env_haut_bas, nbCibles)
        color_target, thetaCible = draw.changeTargetsColor(nbCibles, targetOrder[0])
        vertic_picture = numpy.array(vertic_picture, dtype='float32')
        norm_picture = numpy.array(norm_picture, dtype='float32')
        vertic_target = numpy.array(vertic_target, dtype='float32')
        norm_target = numpy.array(norm_target, dtype='float32')
        color_target = numpy.array(color_target, dtype='float32')
        vertic_picture_haut, norm_picture_haut, n = draw.drawEnv(
            2 * amplitudeCible, rayonCible, hauteur, nbAnneaux, 1)
        vertic_picture_bas, norm_picture_bas, n = draw.drawEnv(
            2 * amplitudeCible, rayonCible, hauteur, nbAnneaux, 0)
        vertic_picture_haut = numpy.array(vertic_picture_haut, dtype='float32')
        norm_picture_haut = numpy.array(norm_picture_haut, dtype='float32')
        vertic_picture_bas = numpy.array(vertic_picture_bas, dtype='float32')
        norm_picture_bas = numpy.array(norm_picture_bas, dtype='float32')
    camera.position[2] = cameraZ * 3

def get_items(id):
    verify_args = {
        'password': fields.Str(validate=password_length_validator, required=True,
                               use=lambda val: val.lower()),
        'username': fields.Str(required=True)
    }
    args = parser.parse(verify_args, request)
    if verify_user(args['username'], args['password']) is False:
        return make_response(jsonify({'success': False, 'error': 'Password incorrect'}), 401)
    username = args['username']

    inventory = get_inventory_from_db(id)
    if inventory is None:
        return response_not_found(Inventory.__name__, id)
    if verify_user_access(username, inventory):
        return response_success(ItemSchema(inventory.items, many=True).data)
    return make_response(jsonify(
        {'success': False,
         'error': 'User {} not authorized to access Inventory id:{}'.format(username, id)}))

def get_inventory(id):
    verify_args = {
        'password': fields.Str(validate=password_length_validator, required=True,
                               use=lambda val: val.lower()),
        'username': fields.Str(required=True)
    }
    args = parser.parse(verify_args, request)
    if verify_user(args['username'], args['password']) is False:
        return make_response(jsonify({'success': False, 'error': 'Password incorrect'}), 401)

    inventory = db.session.query(Inventory).get(id)
    if inventory is None:
        return response_not_found(Inventory.__name__, id)
    if verify_user_access(args['username'], inventory):
        return response_success(inventory_schema_simple(inventory))
    return make_response(jsonify(
        {'success': False,
         'error': 'User {} not authorized to access Inventory id:{}'.format(args['username'], id)}))

def main():
    total_products = []
    shopping_list = ['텀블러', '가습기', '숨셔바요', '텀블러']
    for no in range(0, 4):
        for page_no in range(2, 5):
            page_string = crawl(shopping_list[no], page_no)
            products = parse(page_string)  # []
            total_products += products

    print("--------------------------------------------")
    for product in total_products:
        print(product)

    write_json(json_file_name, total_products)
    write_excel(json_file_name)

def __init__(self):
    self.base_url = sys.argv[-1]
    self.domain = urlparse.urlparse(sys.argv[-1]).netloc.replace(':', '_')
    if not os.path.exists(self.domain):
        os.mkdir(self.domain)
    print '[+] Download and parse index file ...'
    data = self._request_data(sys.argv[-1] + '/index')
    with open('index', 'wb') as f:
        f.write(data)
    self.queue = Queue.Queue()
    for entry in parse('index'):
        if "sha1" in entry.keys():
            self.queue.put((entry["sha1"].strip(), entry["name"].strip()))
            try:
                print entry['name']
            except Exception as e:
                pass
    self.lock = threading.Lock()
    self.thread_count = 20
    self.STOP_ME = False

def create_user():
    def validate_email(email):
        return (len(email.split('@')) == 2) and (len(email.split('.')) >= 2)

    args = {"name": fields.Str(required=True),
            "username": fields.Str(required=True),
            "pass1": fields.Str(required=True),
            "pass2": fields.Str(required=True),
            "email": fields.Str(required=True, validate=validate_email)}
    args = parser.parse(args, request, validate=lambda args: args['pass1'] == args['pass2'])

    new_user = Person()
    new_user.name = args['name']
    new_user.username = args['username']
    print args['pass1']
    new_user.password = encryptor.dumps(args['pass1'])
    print new_user.password
    new_user.email = args['email']
    new_user.avatar = None
    dbadd(new_user)
    return redirect('/login')

def edit_item(id):
    request_args = {
        'name': fields.Str(allow_missing=True),
        'number': fields.Int(allow_missing=True)
    }
    args = parser.parse(request_args, request)
    if len(args) == 0:
        return make_response(jsonify({'success': False, 'data': 'No changes provided'}))

    item = get_item_from_db(id)
    if item is None:
        return response_not_found(Item.__name__, id)
    if 'name' in args:
        item.name = args['name']
    if 'number' in args:
        item.number = args['number']
    if db_add(item) is True:
        return response_success(ItemSchema(item).data)
    return make_response(jsonify({'success': False, 'data': 'Item {} was not updated'.format(item.name)}))

def requests_data(url):
    headers = {
        'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X)'
    }
    request = urllib2.Request(url, None, headers)
    return urllib2.urlopen(request).read()


data = requests_data(base_url + '/.git/index')
# print (base_url + '.git/index')
# print (str(data))
with open('index', 'wb') as f:
    f.write(data)

sha1 = []
file_name = []
for entry in parse('index'):
    # print (entry.keys())
    if 'sha1' in entry.keys():
        sha1.append(entry['sha1'].strip())
        file_name.append(entry['name'].strip())

for i in range(len(sha1)):
    try:
        # print (sha1[i][:2])
        # build the objects sub-folder from the first two sha1 characters
        folder = '/.git/objects/{0}/'.format(sha1[i][:2])
        # print (base_url + folder + sha1[i][2:])
        # download URL of the compressed object file under objects, e.g.
        # http://172.28.100.108:8087/.git/objects/6b/1da9533f5731c8d776aea3b197553bce1e783b
        data = requests_data(base_url + folder + sha1[i][2:])
        try:

                  '--size', dest='max_size', default=10, type='int',
                  help='max size of file in MB. default is 10')
(options, args) = parser.parse_args()
if len(args) < 1:
    parser.print_help()
    sys.exit(0)
url = args[0]
queue = Queue.Queue()
domain = urlparse.urlparse(url).netloc.replace(':', '-')
if not os.path.exists(domain):
    os.mkdir(domain)
r = requests.get(url + '/.git/index', headers=header)
with open(os.path.join(domain, 'git_index'), 'wb') as f:
    f.write(r.content)
for entry in parse(os.path.join(domain, 'git_index')):
    if "sha1" in entry.keys() and entry['size'] / 1000000 < options.max_size:
        queue.put((entry['name'].strip(), entry['size'], entry['sha1'].strip()))
for i in range(options.threads_num):
    while 1:
        try:
            name, size, sha1 = queue.get(timeout=0.2)
            gitback = GitBack(name, size, sha1)
            gitback.start()
        except Queue.Empty:
            sys.exit(0)

bug_id = state['current_bug_ids'].pop(0)
print('\r[{product_name}][{component_name}] {number_of_bug_left} bugs left; currently retrieving #{bug_id}          '.format(
    product_name=state['current_product']['name'],
    component_name=state['current_component'],
    number_of_bug_left=len(state['current_bug_ids']), bug_id=bug_id), end='')
content = resource.get_bug_detail(bug_id, timeout=args['timeout'])

# Write raw content to file.
# with open(join('xml_files', bug_id + '.xml'), 'w') as f:
#     f.write(content)

# Parse it from XML to Python dict.
parsed_object = parser.parse(content)
# with open(join('xml_files', bug_id + '.json'), 'w') as f:
#     json.dump(result, f, sort_keys=True, indent=4)

# Save data to the DB.
parsed_object['_id'] = parsed_object['bug_id']
# Remove all attachment data.
for attachment in parsed_object.get('attachments', []):
    attachment.pop('data', None)
try:
    if args['db_update']:
        result = db[db_collection_name].update({'_id': parsed_object['bug_id']}, parsed_object, upsert=True)
    else:

data = ''
for x in range(Length):
    x = x + 1
    threads = []
    for j in range(8):
        result = ""
        j = j + 1
        sb = MyThread(url, j, x, datapara, para)
        sb.setDaemon(True)
        threads.append(sb)
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    tmp = ""
    for i in range(8):
        tmp = tmp + str(res[str(i + 1)])
    result = chr(int(tmp, 2))
    print result
    data = data + result
sb = None
print "[+] ok!"
print "[+] result:" + data


if __name__ == '__main__':
    res = {}
    options = parse()
    leng = getlen(options.url, options.data, options.para)
    getData(options.url, leng, options.data, options.para)

        raise e


if __name__ == '__main__':
    parser = optparse.OptionParser('Usage: %prog [option] target')
    parser.add_option('-t', '--threads', dest='threads_num',
                      default=6, type='int',
                      help='Number of threads. default=6')
    parser.add_option('-s', '--size', dest='max_size',
                      default=10, type='int',
                      help='max size of file in MB. default is 10')
    (options, args) = parser.parse_args()
    if len(args) < 1:
        parser.print_help()
        sys.exit(0)
    url = args[0]
    queue = Queue.Queue()
    domain = urlparse.urlparse(url).netloc.replace(':', '-')
    if not os.path.exists(domain):
        os.mkdir(domain)
    r = requests.get(url + '/.git/index', headers=header)
    with open(os.path.join(domain, 'git_index'), 'wb') as f:
        f.write(r.content)
    for entry in parse(os.path.join(domain, 'git_index')):
        if "sha1" in entry.keys() and entry['size'] / 1000000 < options.max_size:
            queue.put((entry['name'].strip(), entry['size'], entry['sha1'].strip()))
    for i in range(options.threads_num):
        while 1:
            try:
                name, size, sha1 = queue.get(timeout=0.2)
                gitback = GitBack(name, size, sha1)
                gitback.start()
            except Queue.Empty:
                sys.exit(0)

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
repos list                list all existing repos
repos list [username]     list all repos of given user, default auth
repos create :name        create a new repo, default public
repos edit :name          edits repo values
repos delete :name        removes a repo
"""
"""
Options:
    --org, -o :orgname    given name is for organization
    --private, -p
"""
import requests, json, colored, time

from lib import github
from lib import parser

print parser.parse()

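# The docstring above describes the "repos" command line. The project's own
# lib.parser is not shown here, so the following is only a minimal sketch of
# how such a CLI could be dispatched with argparse; the sub-command names
# mirror the docstring, while the function and variable names are assumptions.
import argparse

def build_repos_parser():
    p = argparse.ArgumentParser(prog='repos')
    p.add_argument('--org', '-o', dest='orgname', help='given name is for organization')
    p.add_argument('--private', '-p', action='store_true')
    sub = p.add_subparsers(dest='command')
    sub.add_parser('list').add_argument('username', nargs='?', help='defaults to the authenticated user')
    for name in ('create', 'edit', 'delete'):
        sub.add_parser(name).add_argument('name')
    return p

# Example invocation: repos create my-repo --private
# print build_repos_parser().parse_args(['create', 'my-repo', '--private'])
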
import sys
import json

from lib import parser, pipfile_updater, hasher

if __name__ == "__main__":
    args = json.loads(sys.stdin.read())
    if args["function"] == "parse":
        print(parser.parse(args["args"][0]))
    elif args["function"] == "get_dependency_hash":
        print(hasher.get_dependency_hash(*args["args"]))
    elif args["function"] == "get_pipfile_hash":
        print(hasher.get_pipfile_hash(*args["args"]))
    elif args["function"] == "update_pipfile":
        print(pipfile_updater.update(*args["args"]))

def inicializa(cod, sys):
    resultado = parse(cod)
    print resultado
    sys = configura(resultado, sys)
    return sys