def _init(project_name, output_dir):
    """Create the on-disk project directory for *project_name* under *output_dir*.

    Prompts before reusing an existing directory and raises FileExistsError if
    the user declines.  NOTE(review): the function appears unfinished — the
    report/scans/logs sub-directory paths are computed but never created, and
    the wordlist locals are assigned but never used or returned.
    """
    # atexit.register(_quit)
    project_dir = os.path.join(output_dir, project_name)
    # Create the project directory, confirming overwrite if it already exists.
    if os.path.isdir(project_dir):
        # Prompt if they want to overwrite the existing directory.
        v.warn("Directory " + project_dir + " exists!")
        if not helper.query_yes_no("Would you like to write to this directory?"):
            # User declined: abort rather than clobber existing output.
            raise FileExistsError
    else:
        # Directory does not exist yet; it is created below.
        pass
    v.log("Creating directory " + project_dir)
    os.makedirs(project_dir, exist_ok=True)
    # Planned sub-directories of the project. NOTE(review): these are only
    # path strings — os.makedirs is never called on them here; presumably a
    # later step creates them. TODO confirm.
    report_dir = os.path.join(project_dir, "report")
    scans_dir = os.path.join(project_dir, "scans")
    logs_dir = os.path.join(project_dir, "logs")
    # create these sub directories
    # Confirm this directory exists; return 0
    # Pull optional wordlist paths from the (module-level) service scan config.
    # NOTE(review): these locals are never used after assignment — dead code
    # or a truncated block; verify against the full file.
    if "username_wordlist" in service_scans_config:
        if isinstance(service_scans_config["username_wordlist"], str):
            username_wordlist = service_scans_config["username_wordlist"]
    if "password_wordlist" in service_scans_config:
        if isinstance(service_scans_config["password_wordlist"], str):
            password_wordlist = service_scans_config["password_wordlist"]
def mergeListOfRepos(self, list_repos, checkout_branch='master', ff_forward_branch='origin/develop', logging=True, question=True, options=None):
    """Fast-forward-merge *ff_forward_branch* into *checkout_branch* for each repo.

    For every module the pending commits are printed (when *logging*), the user
    is asked for confirmation (when *question*), the merge is delegated to
    self.gitObj.mergeRepo, and on success the merged commit hashes are appended
    to 'history_merge.txt'.

    :param list_repos: iterable of module/repo identifiers known to self.gitObj
    :param checkout_branch: target branch of the merge (caller-supplied name is
        used for the merge call; an 'origin/'-prefixed copy is used for display)
    :param ff_forward_branch: source branch of the merge (same prefix handling)
    :param logging: if True, print progress and record merged hashes to file
    :param question: if True, ask per module before merging
    :param options: extra options forwarded to gitObj.mergeRepo (default: [])
    :return: result dict {'retCode', 'stdout', 'stderr'} of the last module
    """
    # BUG FIX: avoid the shared mutable default argument (options=[]).
    if options is None:
        options = []
    # Keep the caller-supplied (un-prefixed) names for the actual merge call;
    # the prefixed names are only used for log/display output below.
    read_check_branch = checkout_branch
    read_ff_branch = ff_forward_branch
    if 'origin/' not in read_check_branch:
        checkout_branch = 'origin/' + read_check_branch
    if 'origin/' not in read_ff_branch:
        # BUG FIX: this previously overwrote checkout_branch instead of
        # ff_forward_branch, so the display/log branch was wrong.
        ff_forward_branch = 'origin/' + read_ff_branch
    ret = {'retCode': 0, 'stdout': '', 'stderr': ''}
    for module in list_repos:
        if logging is True:
            # Python-2 print statement: the "(string) % tuple" expression is
            # formatted before being printed.
            print (bc.WARNING + "ff-Merge: %s %s --> %s" + bc.ENDC) % (module, checkout_branch, ff_forward_branch)
            print("Commits:")
            print(self.gitObj.getLog(module, checkout_branch, ff_forward_branch, "%h, %cd, %cn, %s", ['--first-parent', "--date=format:'%a %d-%m-%Y %R'"])['stdout'])
        if (question is False) or (helper.query_yes_no("Do you want to ff-merge: %s" % (module), 'no') == 'yes'):
            ret = self.gitObj.mergeRepo(module, read_check_branch, read_ff_branch, options)
            if 'retCode' in ret:
                # git exit code 128: fast-forward not possible.
                if ret['retCode'] == 128:
                    print (bc.FAIL + "ff-Merge not possible: %s %s --> %s\nMerge Commit only" + bc.ENDC) % (module, checkout_branch, ff_forward_branch)
        else:
            # User skipped this module.
            ret = {'retCode': -1, 'stdout': 'no', 'stderr': ''}
        if (logging is True) and (ret['retCode'] == 0):
            # Record the merged commit hashes for audit purposes.
            output_string = module + '\n'
            output_string += self.gitObj.getLog(module, checkout_branch, ff_forward_branch, '%H', ['--first-parent'])['stdout']
            output_string += '\n'
            with open('history_merge.txt', 'a') as the_file:
                the_file.write(output_string)
    return ret
def pushListOfRepos(self, list_repos, logging=True, question=True, options=None):
    """Push every repo in *list_repos*, optionally asking per module first.

    :param list_repos: iterable of module/repo identifiers known to self.gitObj
    :param logging: if True, log a confirmation per successfully pushed module
    :param question: if True, ask before pushing each module
    :param options: extra options forwarded to gitObj.pushRepo (default: [])
    """
    # BUG FIX: avoid the shared mutable default argument (options=[]).
    if options is None:
        options = []
    for module in list_repos:
        # 'is False' for consistency with the sibling merge/reset helpers.
        if (question is False) or (helper.query_yes_no("\nDo you want to push: %s" % (module), 'no') == 'yes'):
            ret = self.gitObj.pushRepo(module, options)
        else:
            # User skipped this module.
            ret = {'retCode': -1, 'stdout': 'no', 'stderr': ''}
        if (logging is True) and (ret['retCode'] == 0):
            self.gitObj.log.info("Module: {} pushed.".format(module))
def main(argv):
    """Transcode the input video to h264/ac3 MKV parts via ffmpeg, burning in
    SRT subtitles when present.

    The file is split into ~2-GB parts ('-fs 2100000000'); each part overlaps
    the next by 2 seconds (see the 'lengthParts - 2' adjustment below).
    NOTE(review): *argv* is unused — argparse reads sys.argv directly.
    Relies on helpers defined elsewhere in the file: getLength, getCRF,
    changeExtension, createOutputPath, executeCommands.
    """
    parser = argparse.ArgumentParser(description='FFmpegConveter')
    parser.add_argument('-i', dest='input', required=True, help='input file', metavar='FILE')
    parser.add_argument('-s', dest='subtitlesSize', type=int, default=23)
    args = parser.parse_args()
    path = args.input
    subtitlesSize = args.subtitlesSize
    print 'Processing:\t', os.path.basename(path)
    # Build the base ffmpeg command line; per-part additions are made on a copy.
    commands = []
    commands.append('ffmpeg')
    commands.append('-i "' + path + '"')
    commands.append('-vcodec h264')
    commands.append('-vprofile high')
    commands.append('-preset superfast')
    commands.append('-threads 0')
    commands.append('-acodec ac3')
    commands.append('-map 0:v:0')
    commands.append('-map 0:a')
    # Cap each output part just below 2.1 GB.
    commands.append('-fs 2100000000')
    length = getLength(path)
    size = os.path.getsize(path)
    pathSRT = changeExtension(path, 'srt')
    if not os.path.isfile(pathSRT):
        # No subtitle file next to the video; let the user bail out.
        if not helper.query_yes_no('Subtitles not found, continue?'):
            quit()
    # Derive a CRF quality setting from the source bitrate (kB/s).
    # NOTE(review): Python-2 integer division here — presumably intentional.
    crf = getCRF((size / length) / 1000)
    if crf > 0:
        commands.append('-crf ' + str(crf))
    part = 1
    lengthParts = 0
    # Emit parts until the accumulated (overlap-adjusted) length covers the input.
    while lengthParts + (2 * part) < length:
        commandsT = list(commands)
        outputPath = createOutputPath(path, '_' + str(part))
        outputPath = changeExtension(outputPath, 'mkv')
        if os.path.isfile(pathSRT):
            # Shift the subtitles so they line up with this part's start time,
            # then burn them in via the 'subtitles' video filter.
            with tempfile.NamedTemporaryFile(dir='tmp', suffix='.srt', delete=False) as tmpfile:
                subs = pysrt.open(pathSRT)
                subs.shift(seconds=-lengthParts)
                subs.save(tmpfile.name)
                # Backslashes doubled for the ffmpeg filter string (Windows paths).
                commandsT.append('-vf "subtitles=\'' + os.path.relpath(tmpfile.name).replace('\\', '\\\\') + '\':force_style=\'Fontsize=' + str(subtitlesSize) + '\'"')
        # Seek to where the previous part ended.
        commandsT.insert(1, '-ss ' + str(lengthParts))
        commandsT.append('"' + outputPath + '"')
        executeCommands(commandsT)
        # Advance by the emitted part's length minus a 2-second overlap.
        lengthParts = lengthParts + getLength(outputPath) - 2
        part = part + 1
def resetListOfRepos(self, list_repos, reset_branch='origin/master', logging=True, question=True, options=None):
    """Hard-reset every repo in *list_repos* to *reset_branch*.

    When *logging* is True the commits that will be discarded are logged first.
    When *question* is True the user is asked per module; False resets without
    asking; any other value only logs a warning.

    :param list_repos: iterable of module/repo identifiers known to self.gitObj
    :param reset_branch: branch to reset each module to
    :param logging: if True, log the commits that will be discarded
    :param question: True = ask per module, False = reset unconditionally
    :param options: extra options forwarded to gitObj.resetRepo (default: [])
    """
    # BUG FIX: avoid the shared mutable default argument (options=[]).
    if options is None:
        options = []
    for module in list_repos:
        if logging is True:
            self.gitObj.log.info("=====================")
            self.gitObj.log.info("reset Module: {} --> {}".format(module, reset_branch))
            self.gitObj.log.info("Commits will be discarded:")
            # self.gitObj.log.info( self.gitObj.getLog(module, 'HEAD', reset_branch, '%H', ['--first-parent'])['stdout'] )
            self.gitObj.log.info(self.gitObj.getLog(module, reset_branch, 'HEAD', '%H', ['--graph', '--oneline', '--decorate'])['stdout'])
        if question is True:
            if helper.query_yes_no("Do you want to reset: %s" % (module), 'no') == 'yes':
                self.gitObj.resetRepo(module, reset_branch, options)
                # BUG FIX: log message typo 'reseted' -> 'reset'.
                self.gitObj.log.info("Module: {} reset to Branch: {}".format(module, reset_branch))
                # self.gitObj.log.info( self.gitObj.getLog(module, reset_branch, 'HEAD', '%H', ['--graph', '--oneline', '--decorate'])['stdout'] )
        elif question is False:
            self.gitObj.resetRepo(module, reset_branch, options)
            self.gitObj.log.info("Module: {} reset to Branch: {}".format(module, reset_branch))
        else:
            # BUG FIX: the original message blamed the 'logging' parameter,
            # but this branch is reached on a bad 'question' value.
            self.gitObj.log.warning("wrong use of question Parameter. Must be True or False")
def __main__():
    """Import blocks, transactions and receipts from an Ethereum node into MongoDB.

    Walks blocks [start_block, end_block], storing each block (without its
    transactions) in 'blocks', and — for transactions matching the optional
    address filter — the tx in 'transactions' and its receipt in 'txreceipts'.
    Relies on module-level: parser (argparse), block_to_dict, tx_to_dict,
    query_yes_no.
    """
    args = parser.parse_args()
    # Connect via Infura websocket; alternative providers left for reference.
    provider = Web3.WebsocketProvider('wss://mainnet.infura.io/ws/')
    # provider = Web3.HTTPProvider('https://mainnet.infura.io/')
    # provider = Web3.IPCProvider()
    w3 = Web3(provider)
    if args.start_block:
        start_block = args.start_block
    else:
        start_block = 0
    if args.end_block:
        end_block = args.end_block
    else:
        # Default to the current chain head.
        end_block = w3.eth.blockNumber
    client = MongoClient()
    dbnames = client.list_database_names()
    # Optionally drop the target DB first, asking unless --skip-confirmation.
    if args.drop and args.database in dbnames:
        if not args.skip_confirmation:
            if not query_yes_no('Are you sure you want to drop existing DB: '+args.database, default='no'):
                sys.exit()
        client.drop_database(args.database)
    db = client[args.database]
    block_collection = db['blocks']
    tx_collection = db['transactions']
    txreceipt_collection = db['txreceipts']
    # Build the lowercase address filter from --addr (comma-separated) or --file
    # (one address per line); an empty filter imports every transaction.
    filtered_addrs = []
    if args.addr:
        filtered_addrs += args.addr.split(',')
    elif args.file:
        filtered_addrs += open(args.file, 'r').read().split('\n')
    filtered_addrs = [i.lower() for i in filtered_addrs if Web3.isAddress(i)]
    bar = progressbar.ProgressBar(max_value=end_block-start_block)
    tx_count = 0
    for idx in range(start_block, end_block+1):
        bar.update(idx-start_block)
        block = w3.eth.getBlock(idx, full_transactions=True)
        # Store the block metadata without the embedded transaction objects.
        block_without_tx = block_to_dict(block)
        if 'transactions' in block_without_tx:
            del block_without_tx['transactions']
        block_collection.insert_one(block_without_tx)
        txs = block.transactions
        lines = []
        for n, tx in enumerate(txs):
            # 'to' is None for contract-creation transactions.
            if tx['to']:
                to_matches = tx['to'].lower() in filtered_addrs
            else:
                to_matches = False
            if tx['from']:
                from_matches = tx['from'].lower() in filtered_addrs
            else:
                from_matches = False
            if to_matches or from_matches or filtered_addrs == []:
                # print('Found tx: %s'%tx['hash'].hex())
                tx_collection.insert_one(tx_to_dict(tx))
                # Fetch and store the matching receipt as well.
                tx_receipt = w3.eth.getTransactionReceipt(tx['hash'])
                txreceipt_collection.insert_one(tx_to_dict(tx_receipt))
                tx_count += 1
    bar.finish()
    # Index receipts for fast lookup by transaction hash.
    txreceipt_collection.create_index('transactionHash')
    logging.info('Finished importing %d txs from %d blocks'%(tx_count, end_block-start_block))
def __main__():
    """Scrape transactions for an address from an Etherscan-style API into MongoDB.

    Pages through the API from args.start_block to args.end_block, inserting
    each transaction into the 'transactions' collection; a unique index on
    'hash' makes overlapping pages idempotent.  Relies on module-level:
    parser (argparse), get_url, tx_to_dict, query_yes_no.
    """
    args = parser.parse_args()
    client = MongoClient()
    dbnames = client.list_database_names()
    # Optionally drop the target DB first, asking unless --skip-confirmation.
    if args.drop and args.database in dbnames:
        if not args.skip_confirmation:
            if not query_yes_no('Are you sure you want to drop existing DB: '+args.database, default='no'):
                sys.exit()
        client.drop_database(args.database)
    db = client[args.database]
    tx_collection = db['transactions']
    # Unique index so re-fetched/overlapping pages don't create duplicates.
    tx_collection.create_index([("hash", pymongo.ASCENDING)], unique=True)
    # NOTE(review): filtered_addrs and bar are built but never used below —
    # likely copied from the block-importer variant; verify before removing.
    filtered_addrs = []
    if args.addr:
        filtered_addrs += args.addr.split(',')
    elif args.file:
        filtered_addrs += open(args.file, 'r').read().split('\n')
    filtered_addrs = [i.lower() for i in filtered_addrs if Web3.isAddress(i)]
    bar = progressbar.ProgressBar(max_value=args.end_block-args.start_block)
    tx_count = 0
    start = args.start_block
    end = args.end_block
    while True:
        response = requests.get(get_url(args.addr, start, end))
        txs = response.json()['result']
        # BUG FIX: 'txs == None' -> 'txs is None' (identity check for None).
        if txs is None:
            # API returned nothing (rate limit / hiccup): wait and retry.
            time.sleep(args.delay)
            print('Nothing returned. Repeating API call.')
            continue
        # txs = sorted(txs, key=lambda x: x['blockNumber'])
        for n, tx in enumerate(txs):
            try:
                tx_collection.insert_one(tx_to_dict(tx))
                tx_count += 1
            except pymongo.errors.DuplicateKeyError:
                # Already imported by a previous overlapping page; skip.
                pass
        if len(txs) == 0:
            break
        if int(txs[-1]['blockNumber']) >= end:
            break
        # Next page starts at the last block seen (inclusive — duplicates are
        # absorbed by the unique index above).
        start = int(txs[-1]['blockNumber'])
        # end = txs[-1]['blockNumber']
        print('Scraped', txs[0]['blockNumber'], '-', txs[-1]['blockNumber'])
        time.sleep(args.delay)
    # BUG FIX: removed leftover debugger breakpoint
    # ('import ipdb; ipdb.set_trace()') that halted every run here.
    logging.info('Finished importing %d txs from %d blocks'%(tx_count, args.end_block-args.start_block))
# ======= Check for shipIts from reviewBoard ======= # shipIts = False # ship it is true when at least 3 shipIts, one from taskforce and one from imb(175er) if (shipItCount >= 3): if (shipItNames & lTaskforce): if (shipItNames & lImb): shipIts = True # ======= Jira, RevBoard and can_be_merged okay ====== # if (ready == True and reviewId != "None" and shipIts == True and m['merge_status'] == "can_be_merged"): print("nMR: {} src: {} -> {}".format(m['merge_status'], m['source_branch'], m['target_branch'])) if (helper.query_yes_no("Do you want to merge MR: %s" % (m['url']), 'no') == 'yes'): gitlab_obj.mergeMergeRequest(m['url']) warn_print("MR is merged now. \n") else: warn_print( "MR is not merged and added to the mergeable list \n") mr = { "mergeRequest": m, "JiraOkay": ready, "mergeable": m['merge_status'], "reviewId": True, "reviewInfo": reviewInfo } mergeable.append(mr) # ======= checks not okay ======= #
(src, target, SYS_REF, ZOOM_START, ZOOM_END)
# NOTE(review): the line above looks like the tail of a truncated function
# signature — the 'def ...' header was lost in extraction; restore it from
# the original file. Python-2 code (print statements).
GDAL_BIN_DIRECTORY = ""
#list of supported extensions
supportedExtensions = [".sid",".jp2",".tif"]
# Validate source and target folders before doing any work.
if not os.path.isdir(src):
    print "Error: Source folder [%s] does not exist." % src
    sys.exit(-1)
if not os.path.isdir(target):
    print "Error: Target folder [%s] does not exist." % target
    sys.exit(-1)
# Destructive step: confirm before wiping the target folder.
answer = helper.query_yes_no("Destination folder will be erased. Continue?")
if not answer:
    print "Exiting..."
    sys.exit()
start_time = time.time()
# cleaning target folder
for file in os.listdir(target):
    file_path = os.path.join(target, file)
    if os.path.isdir(file_path):
        # Keep the 'tmp' directory unless remove_tmp (off-screen flag) is set.
        if file != "tmp" or remove_tmp:
            shutil.rmtree(file_path)
    else:
        os.unlink(file_path)
(src, target, SYS_REF, ZOOM_START, ZOOM_END)
# NOTE(review): truncated function header as above — this fragment is a
# near-duplicate of the previous one (only list spacing differs); consider
# deduplicating in the original file. Python-2 code (print statements).
GDAL_BIN_DIRECTORY = ""
#list of supported extensions
supportedExtensions = [".sid", ".jp2", ".tif"]
# Validate source and target folders before doing any work.
if not os.path.isdir(src):
    print "Error: Source folder [%s] does not exist." % src
    sys.exit(-1)
if not os.path.isdir(target):
    print "Error: Target folder [%s] does not exist." % target
    sys.exit(-1)
# Destructive step: confirm before wiping the target folder.
answer = helper.query_yes_no("Destination folder will be erased. Continue?")
if not answer:
    print "Exiting..."
    sys.exit()
start_time = time.time()
# cleaning target folder
for file in os.listdir(target):
    file_path = os.path.join(target, file)
    if os.path.isdir(file_path):
        # Keep the 'tmp' directory unless remove_tmp (off-screen flag) is set.
        if file != "tmp" or remove_tmp:
            shutil.rmtree(file_path)
    else:
        os.unlink(file_path)
supportedExtensions = [".png"]
# NOTE(review): fragment — 'src', 'target' come from an off-screen scope
# (likely a truncated function header). Python-2 code (print statements).
start_time = time.time()
copiedTiles = 0
mergedTiles = 0
# Check if folders
if not os.path.isdir(src):
    print "Error: Source folder [%s] does not exist." % src
    sys.exit(-1)
if not os.path.isdir(target):
    print "Error: Target folder [%s] does not exist." % target
    sys.exit(-1)
# Destructive step: confirm before touching the target tree.
answer = helper.query_yes_no("Target folder structure and files will be modified. Continue?")
if not answer:
    print "Exiting..."
    sys.exit()
# Get all source files
srcFiles = []
for root, dir, files in os.walk(src):
    for file in files:
        fileName, fileExtension = os.path.splitext(file)
        if fileExtension in supportedExtensions:
            # Keep the tile as "z/x/filename" — the last two path components
            # of the walk root plus the file name (slippy-map tile layout,
            # presumably; confirm against the consumer of srcFiles).
            coords = root.split("/")[-2:]
            coords.append(file)
            srcFiles.append("/".join(coords))