def __init__(self):
    """Load the root config.ini, locate the package-level config, and init logging."""
    self.cf = configparser.ConfigParser()
    # Project root = the directory one level above the directory containing this file.
    this_dir = os.path.dirname(os.path.abspath(__file__))
    self.root_dir = os.path.dirname(this_dir)
    self.root_config_path = os.path.join(self.root_dir, "config.ini")
    self.cf.read(self.root_config_path, encoding="utf-8")
    # [DIRECTORY] dir_name names the package folder that holds its own config.ini.
    pkg_dir = self.cf.get("DIRECTORY", "dir_name")
    self.pkg_config_path = os.path.join(self.root_dir, pkg_dir, "config.ini")
    # Initialize logging configuration.
    setup_logger()
def main():
    """CLI entry point: parse arguments and run the `download` sub-command.

    Sample:
        $ python waves.py download -m mac_address -t timestamp -p parent_id -c credentials_file
    """
    import argparse
    from common.logger import setup_logger
    from common.utils import StopWatch

    # args
    parser = argparse.ArgumentParser(description="")
    subparsers = parser.add_subparsers(help='sub-command help')

    # download
    sp = subparsers.add_parser('download', help='download waves')
    sp.set_defaults(cmd='download')
    sp.add_argument('-m', '--mac_address', required=True, help="MAC address")
    sp.add_argument('-t', '--timestamp', required=True, help="Timestamp")
    sp.add_argument('-p', '--parent_id', required=False, default="root", help="Parent ID")
    sp.add_argument('-c', '--credentials', required=False, default="credentials.json", help="Credentials File")
    sp.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    args = parser.parse_args()

    # logger
    setup_logger('Waves', '/tmp/waves.log', verbose=args.verbose)

    _sw = StopWatch()
    waves = Waves(args.credentials, args.parent_id)
    if args.cmd == 'download':
        _sw.start()
        res = waves.download_waves(args.mac_address, int(args.timestamp))
        _sw.stop()
        # BUG FIX: the original Python 2 statement `print "Elapsed sec: %s", _sw.elapsed_sec`
        # never substituted %s (comma, not %) and is a SyntaxError under Python 3.
        print("Elapsed sec: %s" % _sw.elapsed_sec)
        print(res.keys())
def main():
    """Entry point: configure logging, resolve the URL list, and run the checker."""
    args = parse_args()
    setup_logger(args.log_level, args.log_filename)

    # The positional value is either a path to a file of URLs or a single URL.
    urls = read_urls(args.value) if args.is_path else normalize_url(args.value.strip())

    # Windows tolerates fewer concurrent connections; shrink the semaphore.
    if sys.platform == 'win32':
        global sem_size
        sem_size = 50

    run(urls, args)
def web_run(url, log_lvl, log_path, char_mode, report_path, report_name):
    """Run a CORS check against *url*, logging to *log_path* and reporting to *report_path*.

    Args:
        url: target URL (normalized before use).
        log_lvl: numeric/keyed log level passed through log_level().
        log_path: directory for cors.log (created if missing).
        char_mode: character mode forwarded to CORSChecker.
        report_path: directory for the report (created if missing).
        report_name: report file name forwarded to Statistics.
    """
    # BUG FIX: the original `if not os.path.isdir(...): os.mkdir(...)` is a
    # check-then-act race and fails when parent directories are missing;
    # makedirs(exist_ok=True) is atomic w.r.t. concurrent creation and recursive.
    os.makedirs(log_path, exist_ok=True)
    os.makedirs(report_path, exist_ok=True)

    setup_logger(log_level(log_lvl), f"{log_path}/cors.log")
    url = normalize_url(url.strip())

    sem_size = 5000  # concurrency limit for the checker
    stats = Statistics(report_path, report_name)
    checker = CORSChecker(url, sem_size, char_mode=char_mode, stats=stats, if_report=True)
    checker.run()
def __init__(self, vm, callback=None, interval=10):
    """Snapshot the VM's process list and arm a repeating process-check timer.

    Args:
        vm: VM handle; vm.name is used for the logger and log file path.
        callback: optional callable kept for later use by the checker.
        interval: seconds between check_process invocations (default 10).
    """
    self._vm = vm
    self._callback = callback
    self._init_vmi()

    # Per-VM log file lives at "<name>/<name>.log".
    log_file = self._vm.name + '/' + self._vm.name + '.log'
    self.logger = setup_logger(self._vm.name, log_file, logging.INFO)

    # Baseline process snapshot to diff against later.
    self._ori_ps_list = self._get_process_list()
    self._ori_ps_set = set(self._ori_ps_list.keys())

    self._timer = RepeatableTimer(interval, self.check_process)
    self._dump_enabled = False
#!/usr/bin/env python3
import os

import git
from git import Repo

from common.logger import setup_logger

logger = setup_logger(__file__)


def find_repositories():
    """Walk the current working directory and return paths that contain a .git folder."""
    logger.info("Finding repositories")
    repositories = []
    for root, folders, files in os.walk('.'):
        logger.debug("In folder: {}".format(root))
        if '.git' in folders:
            logger.info("Found repository in: {}".format(root))
            repositories.append(root)
    return repositories


def check_repos(folders):
    """Run check_repo() over each folder and return the non-None results.

    BUG FIX: the original built `repos` but never returned it, so callers
    always received None.
    """
    logger.info("Checking {} repos".format(len(folders)))
    repos = []
    for folder in folders:
        check = check_repo(folder)
        if check is not None:
            repos.append(check)
    return repos
# NOTE(review): this fragment begins mid-function — the `def parse_args():` header
# (and the earlier argument definitions for -tdomain/-ldomain/-ports) is outside
# this view. Indentation below is reconstructed; confirm against the full file.
    parser.add_argument('-pr', '--protocols', help='List of protocols used to generate payloads', nargs="+", default=['http', 'https', 'dict'])
    args = parser.parse_args()
    # Reject a target that is neither a valid domain name nor an IPv4 address.
    if not validators.domain(args.tdomain) and not validators.ip_address.ipv4(
            args.tdomain):
        print("Target domain is not a proper domain name")
        sys.exit(1)
    target_domain = args.tdomain
    # Same validation for the listening (attacker-controlled) domain.
    if not validators.domain(args.ldomain) and not validators.ip_address.ipv4(
            args.ldomain):
        print("Listening domain is not a proper domain name")
        sys.exit(1)
    forgery_domain = args.ldomain
    ports = args.ports
    protocols = args.protocols
    return target_domain, forgery_domain, ports, protocols


if __name__ == '__main__':
    target_domain, forgery_domain, ports, protocols = parse_args()
    # log_level(3) — presumably the most verbose level; verify against log_level().
    setup_logger(log_level(3))
    pg = PayloadGenerator(target_domain, forgery_domain, ports=ports, protocols=protocols)
    pg.run()
def main():
    """CLI entry point for Google Drive operations: put/update/list/get/addParent.

    Samples:
        # Upload sample.txt to Test/parent1/sample.txt and Test/parent2/sample.txt
        $ python google_drive.py put -ff sample.txt -tp "Test/parent1, Test/parent2" -tf sample.txt -m text/plain
        # Retrieve the fileID of sample.txt
        $ python google_drive.py list -fn sample.txt -p Test/parent1
        # Download sample.txt
        $ python google_drive.py get -fid 0B-y2EPc2m4U5UjlYc0VGT1BhZ0U
    """
    import argparse
    from common.logger import setup_logger
    credentials = "tests/unittest_credentials.json"

    # args
    parser = argparse.ArgumentParser(description="")
    subparsers = parser.add_subparsers(help='sub-command help')

    # put
    sp_put = subparsers.add_parser('put', help='Upload file')
    sp_put.set_defaults(cmd='put')
    sp_put.add_argument('-ff', '--from_filename', required=True, help="From filename")
    sp_put.add_argument('-tp', '--to_paths', required=True, help="Comma-Separated Paths to directory")
    sp_put.add_argument('-tf', '--to_filename', required=True, help="To filename")
    sp_put.add_argument('-m', '--mime_type', required=False, default="text/csv", help="MIME type")
    sp_put.add_argument('-p', '--parent_id', required=False, default="root", help="Parent folder ID")
    sp_put.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    # update
    sp_update = subparsers.add_parser('update', help='Update file')
    sp_update.set_defaults(cmd='update')
    sp_update.add_argument('-ff', '--from_filename', required=True, help="From filename")
    sp_update.add_argument('-tp', '--to_paths', required=True, help="Comma-Separated Paths to directory")
    sp_update.add_argument('-tf', '--to_filename', required=True, help="To filename")
    sp_update.add_argument('-m', '--mime_type', required=False, default="text/csv", help="MIME type")
    sp_update.add_argument('-p', '--parent_id', required=False, default="root", help="Parent folder ID")
    sp_update.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    # list
    sp_list = subparsers.add_parser('list', help='Get all file information')
    sp_list.set_defaults(cmd='list')
    sp_list.add_argument('-k', '--key', required=False, default=None, help="Keyword")
    sp_list.add_argument('-fn', '--filename', required=False, default=None, help="Filename")
    sp_list.add_argument('-p', '--paths', required=False, default=None, help="Paths")
    sp_list.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    # get
    sp_get = subparsers.add_parser('get', help='Download files')
    sp_get.set_defaults(cmd='get')
    sp_get.add_argument('-fid', '--file_id', required=True, nargs='+', help="File ID")
    sp_get.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    # add parent
    sp_get = subparsers.add_parser('addParent', help='Add parents to file')
    sp_get.set_defaults(cmd='addParent')
    sp_get.add_argument('-fid', '--file_id', required=True, help="File ID and parent ID")
    sp_get.add_argument('-pid', '--parent_id', required=True, help="File ID and parent ID")
    sp_get.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    args = parser.parse_args()

    # logger
    setup_logger('GoogleDrive', '/tmp/google_drive.log', verbose=args.verbose)

    gdrive = GoogleDrive(credentials)

    if args.cmd == 'put':
        paths = args.to_paths.split(',')
        to_paths = [path.strip().split('/') for path in paths]
        gdrive.upload_file(args.from_filename, to_paths, args.to_filename,
                           mime_type=args.mime_type, parent_id=args.parent_id,
                           overwrite=False)
    if args.cmd == 'update':
        paths = args.to_paths.split(',')
        to_paths = [path.strip().split('/') for path in paths]
        gdrive.upload_file(args.from_filename, to_paths, args.to_filename,
                           mime_type=args.mime_type, parent_id=args.parent_id)
    if args.cmd == 'list':
        if args.paths is None:
            paths = []
        else:
            paths = args.paths.split('/')
        if args.key is None:
            if args.filename is None:
                # BUG FIX (this block and below): Python 2 print statements are a
                # SyntaxError under Python 3; converted to the print() function.
                print("Please input key or filename.")
                exit(1)
            res = gdrive.retrieve_all_files(args.filename, paths, [], False)
        else:
            res = gdrive.retrieve_all_files(args.key, paths, [], True)
        if res:
            print("%d Files were found." % len(res))
            for r in res:
                print(r.get('name'), r.get('id'))
        else:
            print("Files were not found.")
    if args.cmd == 'get':
        bd = gdrive.download_list_of_files(args.file_id)
        print("Length: %s" % len(bd))
    if args.cmd == 'addParent':
        pids = gdrive.add_parents(args.file_id, args.parent_id)
        print("Parents ID: %s" % pids)
def main():
    """CLI entry point for dg_meter_info: list, download, or upload meter-info CSVs.

    Samples:
        $ python dg_meter_info.py list
        $ python dg_meter_info.py get -fid 0B-y2EPc2m4U5UjlYc0VGT1BhZ0U
        $ python dg_meter_info.py put -f xxxxxx.csv -d data -m succeeded
    """
    import argparse
    import os
    import sys
    # Make the project root importable so common.logger resolves when run as a script.
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    from common.logger import setup_logger

    credentials = "./tests/unittest_credentials.json"
    paths = []

    # args
    parser = argparse.ArgumentParser(description="")
    subparsers = parser.add_subparsers(help='sub-command help')

    # list
    sp_list = subparsers.add_parser('list', help='Get a list of input files')
    sp_list.set_defaults(cmd='list')
    sp_list.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    # get
    sp_get = subparsers.add_parser('get', help='Download files')
    sp_get.set_defaults(cmd='get')
    sp_get.add_argument('-fid', '--file_id', required=True, help="File ID")
    sp_get.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    # put
    sp_put = subparsers.add_parser('put', help='Upload file')
    sp_put.set_defaults(cmd='put')
    sp_put.add_argument('-f', '--filename', required=True, help="Filename")
    sp_put.add_argument('-d', '--data', required=True, help="CSV data")
    sp_put.add_argument('-m', '--mode', choices=('succeeded', 'failed'), help="Upload csv data to succeeded directory or failed directory")
    sp_put.add_argument('-v', '--verbose', action='store_true', default=False, help="Make the operation more talkative")

    args = parser.parse_args()

    # logger
    setup_logger('GDMeterInfo', '/tmp/dg_meter_info.log', verbose=args.verbose)

    gd = GDMeterInfo(credentials, paths)
    if args.cmd == 'list':
        files = gd.get_all_input_csv_files()
        for f in files:
            # BUG FIX: Python 2 print statements are a SyntaxError under
            # Python 3; converted to the print() function (same output).
            print(f['id'], f['name'])
    if args.cmd == 'get':
        data = gd.download_file(args.file_id)
        print(data.decode('sjis'))
    if args.cmd == 'put':
        # True -> succeeded directory, False -> failed directory.
        if args.mode == 'succeeded':
            gd.put_csv_data(args.filename, args.data, True)
        if args.mode == 'failed':
            gd.put_csv_data(args.filename, args.data, False)