Example #1
def cli():
    """Cli implementation"""
    import argparse

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=35, width=100
        )
    )
    parser.add_argument(
        "-D",
        "--dataset",
        dest='dataset',
        default=False,
        help="Analyse data-set by it's path.",
    )
    parser.add_argument(
        "-f",
        "--file",
        dest='file',
        help="Source code path that will be analysed",
        type=argparse.FileType('r', encoding='UTF-8'),
    )
    parser.add_argument(
        "-m",
        "--mode",
        dest='mode',
        default=Mode.EXEC,
        choices=Mode.get_attr(),
        help="Compiler mode (choices: %(choices)s) (default: %(default)s)",
    )
    parser.add_argument(
        "-O",
        "--output-type",
        dest='output_type',
        default=Output.Format.JSON,
        choices=Output.Format.get_attr(),
        help="output type (choices: %(choices)s) (default: %(default)s)",
    )
    parser.add_argument(
        "-o",
        "--output-file",
        dest='output_file',
        default=Output.Location.SYSTEM_OUT,
        help="Write in output file (example: ~/path/to/file) (default: %(default)s)",
    )
    parser.add_argument(
        "-v",
        "--version",
        action="version",
        version=f"%(prog)s {__version__}",
    )
    parser.add_argument(
        "--with-report",
        dest='with_report',
        help="Output comparison analysis between original AST and custom AST.",
        action="store_true"
    )
    args = parser.parse_args()
    if args.output_type == Output.Format.PICKLE and args.output_file is Output.Location.SYSTEM_OUT:
        parser.error("Chosen PICKLE output but no -o/--output-file specified.")
    if args.output_type == Output.Format.PICKLE and args.with_report:
        parser.error("PICKLE and report combination not available. Please, choose JSON format to extract report.")
    compress(
        file=args.file.read(),
        filename=args.file.name,
        mode=args.mode,
        output_type=args.output_type,
        output_file=args.output_file,
        report=args.with_report,
        dataset_path=args.dataset
    )
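Several of these examples rely on argparse expanding %(default)s, %(choices)s and %(prog)s placeholders inside help and version strings. A minimal standalone sketch of that behaviour (the --mode option and its values below are illustrative only, not taken from the example above):

import argparse

parser = argparse.ArgumentParser(prog='demo')
parser.add_argument('--mode',
                    default='exec',
                    choices=['exec', 'eval'],
                    help='compiler mode (choices: %(choices)s) (default: %(default)s)')
parser.print_help()  # the --mode help line renders as "... (choices: exec, eval) (default: exec)"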
Example #2
if __name__ == '__main__':

    parser = argparse.ArgumentParser(
        description=constant.st.banner,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--version',
                        action='version',
                        version='Version ' + str(constant.CURRENT_VERSION),
                        help='laZagne version')

    # ------------------------------------------- Permanent options -------------------------------------------
    # Version and verbosity
    PPoptional = argparse.ArgumentParser(
        add_help=False,
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=constant.MAX_HELP_POSITION))
    PPoptional._optionals.title = 'optional arguments'
    PPoptional.add_argument('-v',
                            dest='verbose',
                            action='count',
                            default=0,
                            help='increase verbosity level')
    PPoptional.add_argument(
        '-quiet',
        dest='quiet',
        action='store_true',
        default=False,
        help='quiet mode: nothing is printed to the output')

    # Output
    PWrite = argparse.ArgumentParser(
Example #3
def formatter(prog):
    return argparse.HelpFormatter(prog, max_help_position=100, width=200)
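A named formatter function like this is passed to ArgumentParser through its formatter_class parameter. A minimal sketch of the wiring (the --name option is hypothetical, added only to show the widened help column):

import argparse

def formatter(prog):
    return argparse.HelpFormatter(prog, max_help_position=100, width=200)

parser = argparse.ArgumentParser(description='demo', formatter_class=formatter)
parser.add_argument('--name', help='hypothetical option; its help stays on one line thanks to the wide column')
parser.print_help()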
Example #4
    def __init__(self):
        self.term_width = shutil.get_terminal_size().columns
        self.formatter = lambda prog: argparse.HelpFormatter(
            prog,
            max_help_position=int(self.term_width / 2),
            width=self.term_width)

        self.commands = {}
        self.hidden_commands = []
        self.parser = argparse.ArgumentParser(prog='meson',
                                              formatter_class=self.formatter)
        self.subparsers = self.parser.add_subparsers(
            title='Commands',
            dest='command',
            description=
            'If no command is specified it defaults to setup command.')
        self.add_command('setup',
                         msetup.add_arguments,
                         msetup.run,
                         help_msg='Configure the project')
        self.add_command(
            'configure',
            mconf.add_arguments,
            mconf.run,
            help_msg='Change project options',
        )
        self.add_command(
            'dist',
            mdist.add_arguments,
            mdist.run,
            help_msg='Generate release archive',
        )
        self.add_command('install',
                         minstall.add_arguments,
                         minstall.run,
                         help_msg='Install the project')
        self.add_command('introspect',
                         mintro.add_arguments,
                         mintro.run,
                         help_msg='Introspect project')
        self.add_command('init',
                         minit.add_arguments,
                         minit.run,
                         help_msg='Create a new project')
        self.add_command('test',
                         mtest.add_arguments,
                         mtest.run,
                         help_msg='Run tests')
        self.add_command('wrap',
                         wraptool.add_arguments,
                         wraptool.run,
                         help_msg='Wrap tools')
        self.add_command('subprojects',
                         msubprojects.add_arguments,
                         msubprojects.run,
                         help_msg='Manage subprojects')
        self.add_command('help',
                         self.add_help_arguments,
                         self.run_help_command,
                         help_msg='Print help of a subcommand')
        self.add_command(
            'rewrite',
            lambda parser: rewriter.add_arguments(parser, self.formatter),
            rewriter.run,
            help_msg='Modify the project definition')
        self.add_command('compile',
                         mcompile.add_arguments,
                         mcompile.run,
                         help_msg='Build the project')
        self.add_command('devenv',
                         mdevenv.add_arguments,
                         mdevenv.run,
                         help_msg='Run commands in developer environment')

        # Hidden commands
        self.add_command('runpython',
                         self.add_runpython_arguments,
                         self.run_runpython_command,
                         help_msg=argparse.SUPPRESS)
        self.add_command('unstable-coredata',
                         munstable_coredata.add_arguments,
                         munstable_coredata.run,
                         help_msg=argparse.SUPPRESS)
Example #5
File: dnscan.py Project: z3n70/dirscan
def get_args():
    global args

    parser = argparse.ArgumentParser(
        'dnscan.py',
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=40),
        epilog=
        "Specify a custom insertion point with %% in the domain name, such as: dnscan.py -d dev-%%.example.org"
    )
    target = parser.add_mutually_exclusive_group(
        required=True)  # Allow a user to specify a list of target domains
    target.add_argument('-d',
                        '--domain',
                        help='Target domain',
                        dest='domain',
                        required=False)
    target.add_argument('-l',
                        '--list',
                        help='File containing list of target domains',
                        dest='domain_list',
                        required=False)
    parser.add_argument('-w',
                        '--wordlist',
                        help='Wordlist',
                        dest='wordlist',
                        required=False)
    parser.add_argument('-t',
                        '--threads',
                        help='Number of threads',
                        dest='threads',
                        required=False,
                        type=int,
                        default=8)
    parser.add_argument('-6',
                        '--ipv6',
                        help='Scan for AAAA records',
                        action="store_true",
                        dest='ipv6',
                        required=False,
                        default=False)
    parser.add_argument('-z',
                        '--zonetransfer',
                        action="store_true",
                        default=False,
                        help='Only perform zone transfers',
                        dest='zonetransfer',
                        required=False)
    parser.add_argument('-r',
                        '--recursive',
                        action="store_true",
                        default=False,
                        help="Recursively scan subdomains",
                        dest='recurse',
                        required=False)
    parser.add_argument(
        '-R',
        '--resolver',
        help="Use the specified resolver instead of the system default",
        dest='resolver',
        required=False)
    parser.add_argument('-T',
                        '--tld',
                        action="store_true",
                        default=False,
                        help="Scan for TLDs",
                        dest='tld',
                        required=False)
    parser.add_argument('-o',
                        '--output',
                        help="Write output to a file",
                        dest='output_filename',
                        required=False)
    parser.add_argument('-i',
                        '--output-ips',
                        help="Write discovered IP addresses to a file",
                        dest='output_ips',
                        required=False)
    parser.add_argument('-D',
                        '--domain-first',
                        action="store_true",
                        default=False,
                        help='Output domain first, rather than IP address',
                        dest='domain_first',
                        required=False)
    parser.add_argument('-v',
                        '--verbose',
                        action="store_true",
                        default=False,
                        help='Verbose mode',
                        dest='verbose',
                        required=False)
    parser.add_argument('-n',
                        '--nocheck',
                        action="store_true",
                        default=False,
                        help='Don\'t check nameservers before scanning',
                        dest='nocheck',
                        required=False)
    args = parser.parse_args()
Example #6
########################################
version = '0.8'
from datetime import datetime
import lxml
from lxml import html
import requests
import numpy as np
import pandas as pd
import argparse
import json
from collections import OrderedDict
########################################
# ARGS
########################################
# make help output neater
formatter = lambda prog: argparse.HelpFormatter(prog, max_help_position=52)
parser = argparse.ArgumentParser(
    formatter_class=formatter,
    description='General purpose Yahoo! Finance scraper')
parser.add_argument('symbols',
                    nargs='+',
                    metavar='symbol',
                    action='store',
                    help='ticker symbol(s)')
parser.add_argument('--version',
                    action='version',
                    version='%(prog)s ' + version)
parser.add_argument('-t',
                    '--transpose',
                    action='store_true',
                    help='transpose rows and columns')
Example #7
def main():
    user = createUser()

    argparser = argparse.ArgumentParser(
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=40))
    argparser.add_argument(
        "-s",
        "--sort",
        help="sort by hot, new, rising, random_rising, controversial or top",
        type=str,
        choices=[
            'hot', 'new', 'rising', 'random_rising', 'controversial', 'top'
        ])
    argparser.add_argument("-l",
                           "--limit",
                           help="how many posts to grab",
                           type=int)
    argparser.add_argument("-t",
                           "--threshold",
                           help="only post with score above threshold",
                           type=int)
    argparser.add_argument("-ia",
                           "--include-albums",
                           help="include tracks from albums",
                           action="store_true")
    argparser.add_argument("-v",
                           "--verbose",
                           help="output songs being added and other info",
                           action="store_true")
    argparser.add_argument("-f",
                           "--fresh",
                           help="only add tracks with the [FRESH] tag",
                           action="store_true")
    argparser.add_argument("-p",
                           "--playlists",
                           help="add or remove playlists",
                           action="store_true")

    args = argparser.parse_args()

    verbose = args.verbose
    fresh = args.fresh
    l = args.limit if args.limit else False
    choice = args.sort if args.sort else None
    threshold = args.threshold if args.threshold else None
    includeAlbums = args.include_albums
    managePlaylists = args.playlists

    # connect to reddit bot
    reddit = praw.Reddit('bot1')
    subreddit = reddit.subreddit('hiphopheads')

    # create spotipy obj
    spotifyObj = spotipy.Spotify(auth=user.token)
    spotifyObj.trace = False

    if verbose:
        print('Welcome to the HipHopHeads Fresh Script')

    if managePlaylists:
        user.playlist = manage_playlists(user.playlist)

    if not choice:
        inputPrompt = textwrap.dedent("""\
        Enter your desired sorting method:
            hot
            new
            rising
            random_rising
            controversial
            top
        """)
        choice = input(inputPrompt)

    if not l:
        l = int(input('enter post limit: '))

    if not fresh:
        fresh_input = input(
            'Would you like to only add tracks tagged as [FRESH]? (y/n)')
        if fresh_input.lower().strip() == "y":
            fresh = True
        else:
            fresh = False

    if choice.lower() == 'hot':
        sub_choice = subreddit.hot(limit=l)
    elif choice.lower() == 'new':
        sub_choice = subreddit.new(limit=l)
    elif choice.lower() == 'rising':
        sub_choice = subreddit.rising(limit=l)
    elif choice.lower() == 'random_rising':
        sub_choice = subreddit.random_rising(limit=l)
    elif choice.lower() == 'controversial':
        sub_choice = subreddit.controversial(limit=l)
    elif choice.lower() == 'top':
        sub_choice = subreddit.top(limit=l)
    else:
        print("Unsupported sorting method")
        sys.exit()

    tracks = []
    tracks_array = []
    for sub in sub_choice:
        if sub.domain == "open.spotify.com":
            # check if post is a track or album
            isMatch = re.search('(track|album)', sub.url)
            if isMatch is not None:
                if verbose:
                    print("Post: ", sub.title)
                    print("URL: ", sub.url)
                    print("Score: ", sub.score)
                    print("------------------------\n")

                # Discard post below threshold if given
                if threshold and sub.score < threshold:
                    continue

                # If fresh flag given, discard post if not tagged [FRESH]
                if fresh and "[FRESH]" not in sub.title:
                    continue

                # handle possible query string in url
                url = sub.url.split('?')
                formattedUrl = url[0] if url != None else sub.url

                # handle adding tracks from albums
                if includeAlbums and isMatch.group(1) == 'album':
                    tracksInAlbum = spotifyObj.album_tracks(formattedUrl)
                    trackIds = [
                        item['external_urls']['spotify']
                        for item in tracksInAlbum['items']
                    ]
                    tracks.extend(trackIds)
                # handle adding tracks
                elif isMatch.group(1) == 'track':
                    tracks.append(formattedUrl)
        else:
            # handle non-spotify posts
            title, tags = filter_tags(sub.title)
            if 'discussion' not in tags:
                if 'album' in tags or 'impressions' in tags:
                    # there is a pull request for this feature at the moment
                    # so I will leave it out for now
                    search = spotifyObj.search(title, type='album')
                else:
                    search = spotifyObj.search(title, type='track')
                    if search:
                        track_url = extract_track_url(search)
                        if track_url:
                            tracks.append(track_url)
        # handle overflow
        if len(tracks) > 90:
            tracks_array.append(tracks)
            tracks = []

    if len(tracks) > 1:
        tracks_array.append(tracks)

    # handle remove duplicates of tracks before adding new tracks
    if tracks != [] or tracks_array != []:
        results = []  # ensure results is defined for the error handler below
        try:
            if len(tracks_array) > 1:
                for tr in tracks_array:
                    for playlist in user.playlist.split(','):
                        # retrieve information of the tracks in user's playlist
                        existing_tracks = spotifyObj.user_playlist_tracks(
                            user.username, playlist)
                        spotifyObj.user_playlist_remove_all_occurrences_of_tracks(
                            user.username, playlist, tr)
                        results = spotifyObj.user_playlist_add_tracks(
                            user.username, playlist, tr)
                        if verbose:
                            print(
                                'New Tracks added to ',
                                spotifyObj.user_playlist(
                                    user.username, playlist, 'name')['name'],
                                ': ',
                                abs(existing_tracks['total'] -
                                    spotifyObj.user_playlist_tracks(
                                        user.username, playlist)['total']))
                            print()
        except Exception:
            if results == [] and verbose:
                print("no new tracks have been added.")
            else:
                print("an error has occurred removing or adding new tracks")
Example #8
def parse():
    """
    Parses the given arguments into global variables.
    """

    global GENERATE
    global BOTTOM_LEFT
    global TOP_RIGHT
    global TILE_SIZE
    global TOTAL

    parser = argparse.ArgumentParser(description="compute the most relevant topics in a custom grid within a square geographic area", formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=30))
    parser.add_argument("-v", "--version", action="version", version="0.0.0")

    parser.add_argument("bottomleftx", type=_is_float, help="the x coordinate of the bottomleft vertex of the main geographic area")
    parser.add_argument("bottomlefty", type=_is_float, help="the y coordinate of the bottomleft vertex of the main geographic area")
    parser.add_argument("toprightx", type=_is_float, help="the x coordinate of the topright vertex of the main geographic area")
    parser.add_argument("toprighty", type=_is_float, help="the y coordinate of the topright vertex of the main geographic area")
    parser.add_argument("tilesize", type=_is_float, help="the size of each square tile that forms the grid within the main geographic area")
    parser.add_argument("-g", "--generate", action="store_true", help="generate a new grid in the specified area")

    args = parser.parse_args()

    GENERATE = args.generate
    BOTTOM_LEFT = (args.bottomleftx, args.bottomlefty)
    TOP_RIGHT = (args.toprightx, args.toprighty)
    TILE_SIZE = args.tilesize
    TOTAL = ((TOP_RIGHT[0]-BOTTOM_LEFT[0])/TILE_SIZE) * ((TOP_RIGHT[1]-BOTTOM_LEFT[1])/TILE_SIZE)
Example #9
def _usage(self, actions, prog):
    formatter = argparse.HelpFormatter(prog)
    formatter.add_usage(None, actions, [])
    return formatter.format_help().strip()
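The _usage helper above renders just the usage line for a set of actions. A minimal sketch of how it could be exercised against an ordinary parser (note that _actions is a private argparse attribute, used here purely for illustration):

import argparse

parser = argparse.ArgumentParser(prog='demo')
parser.add_argument('--flag', action='store_true', help='hypothetical option')

formatter = argparse.HelpFormatter(parser.prog)
formatter.add_usage(None, parser._actions, [])
print(formatter.format_help().strip())  # prints roughly: usage: demo [-h] [--flag]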
Example #10
def cli():
    import argparse
    import sys

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=35, width=100),
    )
    parser.add_argument(
        "-a",
        "--attributes",
        help="include attributes such as line numbers and column offsets",
        action="store_true",
    )
    parser.add_argument("-c",
                        "--command",
                        type=str,
                        metavar="cmd",
                        help="program passed in as string")
    parser.add_argument(
        "-m",
        "--mode",
        type=Mode,
        metavar="mode",
        default=Mode.EXEC,
        choices=list(Mode),
        help="compilation mode (choices: %(choices)s) (default: %(default)s)",
    )
    parser.add_argument(
        "-t",
        "--terse",
        help="terse output by disabling field annotations",
        action="store_true",
    )
    parser.add_argument("-v",
                        "--version",
                        action="version",
                        version=f"%(prog)s {__version__}")
    parser.add_argument(
        "file",
        nargs="?",
        type=argparse.FileType("r"),
        help="program passed in as file",
        default=sys.stdin,
    )
    args = parser.parse_args()

    if args.command:
        pprintast(args.command,
                  mode=args.mode,
                  terse=args.terse,
                  attributes=args.attributes)
    else:
        pprintast(
            args.file.read(),
            filename=args.file.name,
            mode=args.mode,
            terse=args.terse,
            attributes=args.attributes,
        )
Example #11
    (C) 2015 Dimitri Fousekis (@rurapenthe0) and @m3g9tr0n

    TODO:
    Please send a tweet to @rurapenthe0 or @m3g9tr0n with any suggestions.
    
"""

#import our required libraries
import argparse
import re
import sys
import os
import fileinput

#initialize the command-line parser
parser = argparse.ArgumentParser(prog='hashfind',description='Hashfind Multiple Password Hash Extractor v0.1 Copyright (C) 2015 Dimitri Fousekis (@rurapenthe0), @m3g9tr0n',formatter_class=lambda prog: argparse.HelpFormatter(prog,max_help_position=27),epilog='''
                                    Hashfind is a program that can extract password hashes, emails or credit-card numbers out of files.
                                    This program is provided as-is and without guarantee.
                                    Twitter: @rurapenthe0 ''')
parser.add_argument('-m','--mode', dest='hashoption',metavar='<1-38>',help="1: MD5/MD4/MD2/NTLM, 2: mySQL (old)/LM, 3: Joomla, 4: VBulletin, 5: phpBB3-MD5, 6: Wordpress-MD5, 7: Drupal 7, 8: Unix-md5 (old), 9: SHA512Crypt, 10: E-Mails, 11: Credit Cards, 12: SHA256, 13: SHA384, 14: SHA512, 15: SHA1, 16: Blowfish, 17:DES, 18:MD5-apr, 19:MD5-Sun, 20:sha256(Unix), 21:AIX-MD5, 22:AIX-SSHA256, 23:AIX-SSHA512, 24:AIX-SSHA1, 25:OS X v10.8 / v10.9, 26:IPMI2 RAKP HMAC-SHA1, 27:Sybase ASE, 28:Cisco-8, 29:Cisco-9, 30:Django (PBKDF2-SHA256), 31:nsldap, SHA-1(Base64), Netscape LDAP SHA, 32:Django (SHA-1)6, 33:MSSQL(2005), 34:MSSQL(2000), 35:SSHA-512(Base64), 36:LDAP {SSHA512}, 37:PHPS, 38:Mediawiki B type" )
parser.add_argument('-i','--input', dest='filename',metavar='<filename>',help="The file to search for matches.")
parser.add_argument('-o','--output', dest='outfilename',metavar='<filename>', help="The output file to write results to.")
parser.add_argument('-n','--nowrite', dest="quietmode",action="store_true", help="Output to STDOUT instead of a file and suppress all info messages.")
parser.add_argument('-s','--stdin', dest="stdinmode",action="store_true", help="Read from STDIN.")
parser.set_defaults(q=False)
args = parser.parse_args()
process_as_dir=False
process_as_stdin=False


#check the user supplied inputs
Example #12
def get_args():
    parser = argparse.ArgumentParser(
        prog="plus1",
        description="A symmetric substitution cipher that adds/subtracts one to each unicode character in a file/directory.",
        formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=40),
    )
    parser.add_argument(
        "-r",
        "--recursive",
        action="store_true",
        help="encrypt/decrypt directories recursively",
        default=False,
    )
    parser.add_argument(
        "-d",
        "--delete",
        action="store_true",
        help="after encrypting/decrypting, delete the original file",
        default=False,
    )
    parser.add_argument(
        "-i",
        "--ignore-blacklist",
        action="store_true",
        help="ignore the extension blacklist and consider files that would have been ignored otherwise",
        default=False,
    )
    parser.add_argument(
        "--hidden-files",
        action="store_true",
        help="don't ignore hidden files",
        default=False,
    )
    parser.add_argument(
        "--hidden-directories",
        action="store_true",
        help="don't ignore hidden directories",
        default=False,
    )
    parser.add_argument(
        "--force-delete",
        action="store_true",
        help="don't ask for confirmation when removing files",
        default=False,
    )
    required = parser.add_argument_group("required arguments")
    required.add_argument(
        "-f", "--file", required=True, help="file or directory to encrypt/decrypt"
    )
    required_m_group = required.add_mutually_exclusive_group(required=True)
    required_m_group.add_argument(
        "-a",
        "--add",
        action="store_true",
        help="encrypt; add 1 to each unicode character",
    )
    required_m_group.add_argument(
        "-s",
        "--subtract",
        action="store_true",
        help="decrypt; subtract 1 from each unicode character",
    )
    return parser.parse_args()
Example #13
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse, requests, md5, base64, os

from scan import password
from bs4 import BeautifulSoup

parser = argparse.ArgumentParser(description='Script for DNSChanger by Ghost', prog='Ghost', formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=100,width=200))
parser.add_argument("-dns1", "--dns1", help = "Define o dns primário", metavar='', default='192.99.187.193', required=False)
parser.add_argument("-dns2", "--dns2", help = "Define o dns secundário", metavar='', default='8.8.4.4', required=False)
parser.add_argument("-host", "--host", help = "Define o ip e porta do router a ser executado", metavar='', default='a', required=False)

args = parser.parse_args()

dns1 = args.dns1
dns2 = args.dns2
host = args.host.replace("http://","")

router = requests.Session()

for x in password.Credentials:

    xx = x.slipt(":")

    data = {
        'frashnum':'',
        'action':'login',
        'Frm_Logintoken':'1',
        'Username':xx[0],
Example #14
def require_arguments():
    """Get the arguments from CLI input.

    Returns:
        :class:`argparse.Namespace`: A namespace with all the parsed CLI arguments.

    """

    parser = argparse.ArgumentParser(
        prog=PackageHelper.get_alias(),
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=180, width=180))

    optional = parser._action_groups.pop()
    required = parser.add_argument_group("required arguments")

    required.add_argument("-d",
                          "--domain",
                          help="the domain to scan (e.g. finnwea.com)",
                          required=True)

    optional.add_argument("-c",
                          "--crawl",
                          help="use the crawler to scan all the entire domain",
                          action="store_true")
    optional.add_argument(
        "-vp",
        "--verify-payload",
        help=
        "use a javascript engine to verify if the payload was executed (otherwise false positives may occur)",
        action="store_true")
    optional.add_argument(
        "-av",
        "--angular-version",
        help=
        "manually pass the angular version (e.g. 1.4.2) if the automatic check doesn't work",
        type=str,
        default=None)
    optional.add_argument(
        "-siv",
        "--stop-if-vulnerable",
        help="(crawler option) stop scanning if a vulnerability was found",
        action="store_true")
    optional.add_argument(
        "-pmm",
        "--protocol-must-match",
        help=
        "(crawler option) only scan pages with the same protocol as the startpoint (e.g. only https)",
        action="store_true")
    optional.add_argument(
        "-sos",
        "--scan-other-subdomains",
        help=
        "(crawler option) also scan pages that have another subdomain than the startpoint",
        action="store_true")
    optional.add_argument(
        "-soh",
        "--scan-other-hostnames",
        help=
        "(crawler option) also scan pages that have another hostname than the startpoint",
        action="store_true")
    optional.add_argument(
        "-sot",
        "--scan-other-tlds",
        help=
        "(crawler option) also scan pages that have another tld than the startpoint",
        action="store_true")
    optional.add_argument(
        "-md",
        "--max-depth",
        help="(crawler option) the maximum search depth (default is unlimited)",
        type=int)
    optional.add_argument(
        "-mt",
        "--max-threads",
        help=
        "(crawler option) the maximum amount of simultaneous threads to use (default is 8)",
        type=int,
        default=8)
    optional.add_argument(
        "-iic",
        "--ignore-invalid-certificates",
        help="(crawler option) ignore invalid ssl certificates",
        action="store_true")
    optional.add_argument(
        "-tc",
        "--trusted-certificates",
        help=
        "(crawler option) trust this CA_BUNDLE file (.pem) or directory with certificates",
        type=str,
        default=None)

    parser._action_groups.append(optional)
    return parser.parse_args()
Example #15
def _get_parser():
    parser = argparse.ArgumentParser(
        prog='monasca_alarm_definition',
        #description=__doc__.strip(),
        add_help=False,
        # formatter_class=HelpFormatter,
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=29))

    # Global arguments
    parser.add_argument('-h',
                        '--help',
                        action='store_true',
                        help=argparse.SUPPRESS)

    parser.add_argument('-v',
                        '--verbose',
                        default=False,
                        action="store_true",
                        help="Print more verbose output.")

    parser.add_argument('-k',
                        '--insecure',
                        default=False,
                        action='store_true',
                        help="Explicitly allow the client to perform "
                        "\"insecure\" SSL (https) requests. The server's "
                        "certificate will not be verified against any "
                        "certificate authorities. "
                        "This option should be used with caution.")

    parser.add_argument('--cert-file',
                        help='Path of certificate file to use in SSL '
                        'connection. This file can optionally be '
                        'prepended with the private key.')

    parser.add_argument('--key-file',
                        help='Path of client key to use in SSL connection. '
                        'This option is not necessary if your key is'
                        ' prepended to your cert file.')

    parser.add_argument('--os-cacert',
                        default=_env('OS_CACERT'),
                        help='Specify a CA bundle file to use in verifying'
                        ' a TLS (https) server certificate. Defaults to'
                        ' env[OS_CACERT]. Without either of these, the'
                        ' client looks for the default system CA'
                        ' certificates.')

    parser.add_argument('--timeout',
                        default=600,
                        help='Number of seconds to wait for a response.')

    parser.add_argument('--os-username',
                        default=_env('OS_USERNAME'),
                        help='Defaults to env[OS_USERNAME].')

    parser.add_argument('--os_username', help=argparse.SUPPRESS)

    parser.add_argument('--os-password',
                        default=_env('OS_PASSWORD'),
                        help='Defaults to env[OS_PASSWORD].')

    parser.add_argument('--os_password', help=argparse.SUPPRESS)

    parser.add_argument('--os-project-id',
                        default=_env('OS_PROJECT_ID'),
                        help='Defaults to env[OS_PROJECT_ID].')

    parser.add_argument('--os_project_id', help=argparse.SUPPRESS)

    parser.add_argument('--os-project-name',
                        default=_env('OS_PROJECT_NAME'),
                        help='Defaults to env[OS_PROJECT_NAME].')

    parser.add_argument('--os_project_name', help=argparse.SUPPRESS)

    parser.add_argument('--os-domain-id',
                        default=_env('OS_DOMAIN_ID'),
                        help='Defaults to env[OS_DOMAIN_ID].')

    parser.add_argument('--os_domain_id', help=argparse.SUPPRESS)

    parser.add_argument('--os-domain-name',
                        default=_env('OS_DOMAIN_NAME'),
                        help='Defaults to env[OS_DOMAIN_NAME].')

    parser.add_argument('--os_domain_name', help=argparse.SUPPRESS)

    parser.add_argument('--os-auth-url',
                        default=_env('OS_AUTH_URL'),
                        help='Defaults to env[OS_AUTH_URL].')

    parser.add_argument('--os_auth_url', help=argparse.SUPPRESS)

    parser.add_argument('--os-region-name',
                        default=_env('OS_REGION_NAME'),
                        help='Defaults to env[OS_REGION_NAME].')

    parser.add_argument('--os_region_name', help=argparse.SUPPRESS)

    parser.add_argument('--os-auth-token',
                        default=_env('OS_AUTH_TOKEN'),
                        help='Defaults to env[OS_AUTH_TOKEN].')

    parser.add_argument('--os_auth_token', help=argparse.SUPPRESS)

    parser.add_argument('--os-no-client-auth',
                        default=_env('OS_NO_CLIENT_AUTH'),
                        action='store_true',
                        help="Do not contact keystone for a token. "
                        "Defaults to env[OS_NO_CLIENT_AUTH].")

    parser.add_argument('--monasca-api-url',
                        default=_env('MONASCA_API_URL'),
                        help='Defaults to env[MONASCA_API_URL].')

    parser.add_argument('--monasca_api_url', help=argparse.SUPPRESS)

    parser.add_argument('--monasca-api-version',
                        default=_env('MONASCA_API_VERSION', default='2_0'),
                        help='Defaults to env[MONASCA_API_VERSION] or 2_0')

    parser.add_argument('--monasca_api_version', help=argparse.SUPPRESS)

    parser.add_argument('--os-service-type',
                        default=_env('OS_SERVICE_TYPE'),
                        help='Defaults to env[OS_SERVICE_TYPE].')

    parser.add_argument('--os_service_type', help=argparse.SUPPRESS)

    parser.add_argument('--os-endpoint-type',
                        default=_env('OS_ENDPOINT_TYPE'),
                        help='Defaults to env[OS_ENDPOINT_TYPE].')

    parser.add_argument('--os_endpoint_type', help=argparse.SUPPRESS)

    parser.add_argument(
        '--definitions-file',
        help='YAML file of Notifications and Alarm Definitions')

    return parser
Example #16
File: base.py Project: Baiker000/NekoSleep
def _format_description(self, description):
    width = argparse.HelpFormatter('prog')._width
    return text.ensure_newlines(
        text.fill_paragraphs(text.dedent(description), width))
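Note that _width is a private attribute of HelpFormatter; recent CPython versions derive it from the terminal size. If relying on the private attribute is a concern, a rough equivalent under that assumption (a sketch, not the project's code) is:

import shutil

# approximate the wrap width argparse.HelpFormatter computes internally
width = shutil.get_terminal_size().columns - 2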
Example #17
def formatter(prog):
    return argparse.HelpFormatter(prog, max_help_position=60)
Example #18
def main():

    ok = True

    # setup argument parser
    formatter = lambda prog: argparse.HelpFormatter(
        prog, max_help_position=4, width=90)
    parser = argparse.ArgumentParser(description='HDF5 file copier.',
                                     formatter_class=formatter)

    # define arguments
    parser.add_argument('inputPaths',
                        metavar='input',
                        type=str,
                        nargs='+',
                        help='input directory and/or file name(s)')
    parser.add_argument(
        '-b',
        '--buffer-size',
        dest='buffer_size',
        type=int,
        action='store',
        default=IO_BUFFER_SIZE,
        help='IO buffer size (number of HDF5 chunks, default = ' +
        str(IO_BUFFER_SIZE) + ')')
    parser.add_argument(
        '-c',
        '--chunk-size',
        dest='chunk_size',
        type=int,
        action='store',
        default=CHUNK_SIZE,
        help='HDF5 output file dataset chunk size (default = ' +
        str(CHUNK_SIZE) + ')')
    parser.add_argument(
        '-e',
        '--erase-output',
        dest='erase_output',
        action='store_true',
        default=False,
        help=
        'erase existing output file before copying input files (default = False)'
    )
    parser.add_argument('-f',
                        '--filter',
                        dest='filename_filter',
                        type=str,
                        action='store',
                        default='*',
                        help='input filename filter (default = *)')
    parser.add_argument('-o',
                        '--output',
                        dest='output_fileName',
                        type=str,
                        action='store',
                        default='h5out.h5',
                        help='output file name (default = h5out.h5)')
    parser.add_argument('-r',
                        '--recursive',
                        dest='recursion_depth',
                        type=int,
                        nargs='?',
                        action='store',
                        default=0,
                        const=sys.maxsize,
                        help='recursion depth (default is no recursion)')
    parser.add_argument(
        '-s',
        '--stop-on-error',
        dest='stop_on_error',
        action='store_true',
        default=False,
        help=
        'stop all copying if an error occurs (default is skip to next file and continue)'
    )
    parser.add_argument(
        '-x',
        '--exclude',
        dest='exclude_group',
        type=str,
        nargs='+',
        action='store',
        default='',
        help=
        'list of input groups to be excluded (default is all groups will be copied)'
    )

    # parse arguments
    args = parser.parse_args()

    # output filename
    outFname = args.output_fileName
    if outFname[-3:] != '.h5' and outFname[-3:] != '.H5':
        outFname += '.h5'  # add output file extension if necessary

    fileFilter = args.filename_filter + '.h5'  # add file extension to filter

    excludeList = ' '.join(
        args.exclude_group)  # construct exclude list for groups

    # check whether output file already exists
    # if it does exist, check whether it is an existing file or existing directory
    existingOutputFile = False
    fullOutFpath = os.path.abspath(outFname)
    if os.path.exists(fullOutFpath):
        if os.path.isfile(fullOutFpath):
            if not args.erase_output: existingOutputFile = True
        elif os.path.isdir(fullOutFpath):
            print(
                'Error encountered:', fullOutFpath,
                'is the name of an existing directory - choose a different output filename'
            )
            ok = False
        else:
            print(
                'Error encountered:', fullOutFpath,
                'is the name of an existing filesystem object - choose a different output filename'
            )
            ok = False

    if ok:

        # disable HDF5 dataset cache
        # default cache size is 1MB - maximum is 32MB
        # we can change it here by multiplying the stored (default) value by some number (max 32...)
        # we don't need the cache, so just multiply by 0 to disable it - we save a little bit of memory
        # (the cache is per open dataset), and gain a (tiny) bit in performance
        try:
            h5FileAccessPropertyList = h5.h5p.create(h5.h5p.FILE_ACCESS)
            h5CacheSettings = list(h5FileAccessPropertyList.get_cache())
            h5CacheSettings[2] *= 0
            h5FileAccessPropertyList.set_cache(*h5CacheSettings)
        except Exception as e:  # error disabling cache
            print('Warning: unable to disable the HDF5 dataset cache:',
                  str(e))  # show warning

        # open the output file - create it if necessary
        # using low-level functions here so we can provide the propertly list
        if existingOutputFile:  # output file exists?
            try:  # yes
                outHDF5file = h5.h5f.open(
                    fullOutFpath.encode('utf-8'),
                    fapl=h5FileAccessPropertyList)  # open it
            except Exception as e:  # error opening file
                print('Error occurred while opening the output file:',
                      str(e))  # announce error
                ok = False  # fail
        else:  # output file does not exist
            try:
                outHDF5file = h5.h5f.create(
                    fullOutFpath.encode('utf-8'),
                    fapl=h5FileAccessPropertyList)  # create it
            except Exception as e:  # error creating file
                print('Error occurred while creating the output file:',
                      str(e))  # announce error
                ok = False  # fail

        if ok:
            try:
                with contextlib.closing(
                        outHDF5file
                ) as h5OutFid:  # close the file when done...
                    # process input files and directories
                    with h5.File(
                            h5OutFid
                    ) as outFile:  # get file id for high-level functions
                        for thisPath in args.inputPaths:  # for each input path
                            thisFullPath = os.path.abspath(
                                thisPath)  # fully-qualified filename
                            if os.path.exists(thisFullPath):  # path exists?
                                if os.path.isfile(
                                        thisFullPath):  # yes - is it a file?
                                    if fnmatch(
                                            thisPath, fileFilter
                                    ):  # yes - filename matches filter?
                                        ok = copyHDF5File(
                                            thisFullPath,
                                            outFile,
                                            chunkSize=args.chunk_size,
                                            bufferSize=args.buffer_size,
                                            excludeList=excludeList
                                        )  # yes - copy it
                                    else:  # no - does not match filter
                                        print('Warning:', thisPath,
                                              'does not match file filter (',
                                              fileFilter,
                                              '): ignored')  # show warning
                                elif os.path.isdir(
                                        thisFullPath
                                ):  # not a file - directory?
                                    # yes - process directory
                                    ok = processDirectory(
                                        thisFullPath,
                                        outFile,
                                        recursive=args.recursion_depth,
                                        fileFilter=fileFilter,
                                        stopOnError=args.stop_on_error,
                                        chunkSize=args.chunk_size,
                                        bufferSize=args.buffer_size,
                                        excludeList=excludeList)
                                else:  # not a file or directory
                                    print(
                                        'Warning:', thisFullPath,
                                        'is not a file or a directory: ignored'
                                    )  # show warning
                            else:  # path does not exist
                                print(
                                    'Warning:', thisFullPath,
                                    'does not exist: ignored')  # show warning

                            if args.stop_on_error and not ok:  # error occurred, and stop-on-error specified?
                                print('Error encountered: copy stopped'
                                      )  # yes - announce error
                                break  # and stop

                if not existingOutputFile and os.path.getsize(
                        fullOutFpath
                ) <= 0:  # is output file a new file and empty?
                    os.remove(
                        fullOutFpath)  # yes - didn't write to it - delete it

            except Exception as e:  # error occurred
                print('Error:', str(e))  # announce error
Example #19
File: cobalt.py Project: cnvx/cobalt
__version__ = '2.2.0'

import tensorflow as tf
import numpy as np
import os
import sys
import urllib.request
import tarfile
import pickle
import glob
import shutil
import hashlib
import argparse as arg
''' Argument parsing '''

parser_format = lambda prog: arg.HelpFormatter(prog, max_help_position=79)
parser = arg.ArgumentParser(formatter_class=parser_format)

parser.add_argument('-t',
                    '--train',
                    metavar='steps',
                    dest='times_to_train',
                    type=int,
                    default=0,
                    help='how many times to train')
parser.add_argument('-o',
                    '--overwrite',
                    action='store_true',
                    default=False,
                    help='overwrite saved network data')
parser.add_argument('-b',
Example #20
def _prepare_argv_parser():
    """
    (for internal use) Returns ArgumentParser with configured options and \
    help strings
    :returns: (argparse.ArgumentParser) object with cli options
    """
    ap = argparse.ArgumentParser(
        description=TOOL_DESCRIPTION,
        formatter_class=lambda prog: argparse.HelpFormatter(prog, width=120))

    ap.add_argument(
        "destination",
        help="Destination host <dst>[:port] (default port {})".format(
            DFL_SIP_PORT),
        type=str,
        action="store",
    )

    ap.add_argument("-c",
                    dest="count",
                    help="Number of requests, 0 for infinite ping (default)",
                    type=int,
                    default=DFL_REQS_COUNT)

    ap.add_argument("-p",
                    dest="proto",
                    help="Protocol (udp, tcp or tls)",
                    type=str,
                    choices=("tcp", "udp", "tls"),
                    default=DFL_SIP_TRANSPORT)

    ap.add_argument(
        "-t",
        dest="sock_timeout",
        help="Socket timeout in seconds (float, default {:.01f})".format(
            DFL_PING_TIMEOUT),
        type=float,
        action="store",
        default=DFL_PING_TIMEOUT)

    ap.add_argument("-f",
                    dest="bad_resp_is_fail",
                    help="Treat 4xx, 5xx, 6xx responses as failed request",
                    action="store_true")

    ap.add_argument(
        "-l",
        dest="pause_between_transmits",
        help="Pause between transmits (default 0.5, 0 for immediate send)",
        action="store",
        type=float,
        default=DFL_SEND_PAUSE)

    ap.add_argument(
        "-i",
        dest="src_sock",
        help="Source iface [ip/hostname]:[port] (hostname part is optional, "
        "possible to type \":PORT\" form to just set srcport)",
        type=str,
        action="store")

    ap.add_argument("-v",
                    dest="verbose_mode",
                    help="Verbose mode (show sent and received content)",
                    action="store_true")

    ap.add_argument("-s",
                    dest="payload_size",
                    help="Fill request upto certain size",
                    type=int,
                    action="store",
                    default=DFL_PAYLOAD_SIZE)
    ap.add_argument("-V", action="version", version=VERSION)
    return ap
Example #21
def main():
    parser = argparse.ArgumentParser(
        usage='%s [OPTION]... DOMAIN' % sys.argv[0],
        add_help=False,
        description=
        '''Domain name permutation engine for detecting homograph phishing attacks, '''
        '''typosquatting, fraud and brand impersonation.''',
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=30))

    parser.add_argument('domain', help='Domain name or URL to scan')
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help='Show all DNS records')
    parser.add_argument('-b',
                        '--banners',
                        action='store_true',
                        help='Determine HTTP and SMTP service banners')
    parser.add_argument('-d',
                        '--dictionary',
                        type=str,
                        metavar='FILE',
                        help='Generate more domains using dictionary FILE')
    parser.add_argument('-f',
                        '--format',
                        type=str,
                        choices=['cli', 'csv', 'json', 'idle'],
                        default='cli',
                        help='Output format (default: cli)')
    parser.add_argument('-g',
                        '--geoip',
                        action='store_true',
                        help='Lookup for GeoIP location')
    parser.add_argument('-m',
                        '--mxcheck',
                        action='store_true',
                        help='Check if MX can be used to intercept emails')
    parser.add_argument('-o',
                        '--output',
                        type=str,
                        metavar='FILE',
                        help='Save output to FILE')
    parser.add_argument('-r',
                        '--registered',
                        action='store_true',
                        help='Show only registered domain names')
    parser.add_argument(
        '-s',
        '--ssdeep',
        action='store_true',
        help=
        'Fetch web pages and compare their fuzzy hashes to evaluate similarity'
    )
    parser.add_argument(
        '--ssdeep-url',
        metavar='URL',
        help='Override URL to fetch the original web page from')
    parser.add_argument(
        '-t',
        '--threads',
        type=int,
        metavar='NUMBER',
        default=THREAD_COUNT_DEFAULT,
        help='Start specified NUMBER of threads (default: %s)' %
        THREAD_COUNT_DEFAULT)
    parser.add_argument('-w',
                        '--whois',
                        action='store_true',
                        help='Lookup for WHOIS creation/update time (slow!)')
    parser.add_argument('--tld',
                        type=str,
                        metavar='FILE',
                        help='Generate more domains by swapping TLD from FILE')
    parser.add_argument('--nameservers',
                        type=str,
                        metavar='LIST',
                        help='DNS servers to query (separated with commas)')
    parser.add_argument(
        '--useragent',
        type=str,
        metavar='STRING',
        default='Mozilla/5.0 dnstwist/%s' % __version__,
        help=
        'User-Agent STRING to send with HTTP requests (default: Mozilla/5.0 dnstwist/%s)'
        % __version__)

    def _exit(code):
        print(FG_RST + ST_RST, end='')
        sys.exit(code)

    if len(sys.argv) < 2 or sys.argv[1] in ('-h', '--help'):
        print('{}dnstwist {} by <{}>{}\n'.format(ST_BRI, __version__,
                                                 __email__, ST_RST))
        parser.print_help()
        _exit(0)

    threads = []

    args = parser.parse_args()

    def p_cli(text):
        if args.format == 'cli': print(text, end='', flush=True)

    def p_err(text):
        print(text, file=sys.stderr, flush=True)

    def signal_handler(signal, frame):
        print('\nStopping threads... ', file=sys.stderr, end='', flush=True)
        for worker in threads:
            worker.stop()
            worker.join()
        print('Done', file=sys.stderr)
        _exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    if args.threads < 1:
        parser.error('number of threads must be greater than zero')

    if MODULE_WHOIS and args.whois and args.threads != 1:
        parser.error(
            'to prevent abusing WHOIS policies argument --whois can be used only with --threads=1'
        )

    nameservers = []
    if args.nameservers:
        nameservers = args.nameservers.split(',')
        for r in nameservers:
            if len(r.split('.')) != 4:
                parser.error('invalid DNS nameserver')

    dictionary = []
    if args.dictionary:
        if not path.exists(args.dictionary):
            parser.error('dictionary file not found: %s' % args.dictionary)
        with open(args.dictionary) as f:
            dictionary = set(f.read().splitlines())
            dictionary = [x for x in dictionary if x.isalnum()]

    tld = []
    if args.tld:
        if not path.exists(args.tld):
            parser.error('TLD dictionary file not found: %s' % args.tld)
        with open(args.tld) as f:
            tld = set(f.read().splitlines())
            tld = [x for x in tld if x.isalpha()]

    if args.output:
        try:
            sys.stdout = open(args.output, 'x')
        except FileExistsError:
            parser.error('file already exists: %s' % args.output)
        except FileNotFoundError:
            parser.error('no such file or directory: %s' % args.output)
        except PermissionError:
            parser.error('permission denied: %s' % args.output)

    ssdeep_url = None
    if args.ssdeep_url:
        try:
            ssdeep_url = UrlParser(args.ssdeep_url)
        except ValueError:
            parser.error('invalid domain name: ' + args.ssdeep_url)

    try:
        url = UrlParser(args.domain)
    except ValueError:
        parser.error('invalid domain name: ' + args.domain)

    fuzz = DomainFuzz(url.domain, dictionary=dictionary, tld_dictionary=tld)
    fuzz.generate()
    domains = fuzz.domains

    if args.format == 'idle':
        print(create_idle(domains))
        _exit(0)

    if not MODULE_DNSPYTHON:
        p_err('Notice: Missing module DNSPython (DNS features limited)')
    if not MODULE_GEOIP and args.geoip:
        p_err(
            'Notice: Missing module GeoIP (geographical location not available)'
        )
    if not MODULE_WHOIS and args.whois:
        p_err('Notice: Missing module whois (WHOIS database not accessible)')
    if not MODULE_SSDEEP and args.ssdeep:
        p_err('Notice: Missing module ssdeep (fuzzy hashes not available)')
    if not MODULE_REQUESTS and args.ssdeep:
        p_err(
            'Notice: Missing module Requests (webpage downloads not possible)')

    p_cli(FG_RND + ST_BRI + '''     _           _            _     _
  __| |_ __  ___| |___      _(_)___| |_
 / _` | '_ \/ __| __\ \ /\ / / / __| __|
| (_| | | | \__ \ |_ \ V  V /| \__ \ |_
 \__,_|_| |_|___/\__| \_/\_/ |_|___/\__| {%s}

''' % __version__ + FG_RST + ST_RST)

    ssdeep_init = str()
    ssdeep_effective_url = str()
    if args.ssdeep and MODULE_SSDEEP and MODULE_REQUESTS:
        request_url = ssdeep_url.full_uri() if ssdeep_url else url.full_uri()
        p_cli('Fetching content from: %s ' % request_url)
        try:
            req = requests.get(request_url,
                               timeout=REQUEST_TIMEOUT_HTTP,
                               headers={'User-Agent': args.useragent})
        except requests.exceptions.ConnectionError:
            p_cli('Connection error\n')
            _exit(1)
        except requests.exceptions.HTTPError:
            p_cli('Invalid HTTP response\n')
            _exit(1)
        except requests.exceptions.Timeout:
            p_cli('Timeout (%d seconds)\n' % REQUEST_TIMEOUT_HTTP)
            _exit(1)
        except Exception:
            p_cli('Failed!\n')
            _exit(1)
        else:
            if len(req.history) > 1:
                p_cli('➔ %s ' % req.url.split('?')[0])
            p_cli('%d %s (%.1f Kbytes)\n' %
                  (req.status_code, req.reason, float(len(req.text)) / 1000))
            if req.status_code // 100 == 2:
                ssdeep_init = ssdeep.hash(''.join(req.text.split()).lower())
                ssdeep_effective_url = req.url.split('?')[0]
            else:
                args.ssdeep = False

    p_cli('Processing %d permutations ' % len(domains))

    jobs = queue.Queue()

    for i in range(len(domains)):
        jobs.put(domains[i])

    for i in range(args.threads):
        worker = DomainThread(jobs)
        worker.daemon = True

        worker.uri_scheme = url.scheme
        worker.uri_path = url.path
        worker.uri_query = url.query

        worker.domain_init = url.domain

        if MODULE_DNSPYTHON:
            worker.option_extdns = True
        if MODULE_WHOIS and args.whois:
            worker.option_whois = True
        if MODULE_GEOIP and args.geoip:
            worker.option_geoip = True
        if args.banners:
            worker.option_banners = True
        if args.ssdeep and MODULE_REQUESTS and MODULE_SSDEEP and ssdeep_init:
            worker.option_ssdeep = True
            worker.ssdeep_init = ssdeep_init
            worker.ssdeep_effective_url = ssdeep_effective_url
        if args.mxcheck:
            worker.option_mxcheck = True
        if args.nameservers:
            worker.nameservers = nameservers
        worker.useragent = args.useragent

        worker.start()
        threads.append(worker)

    qperc = 0
    while not jobs.empty():
        p_cli('·')
        qcurr = 100 * (len(domains) - jobs.qsize()) / len(domains)
        if qcurr - 20 >= qperc:
            qperc = qcurr
            p_cli('%u%%' % qperc)
        time.sleep(1.0)

    hits_total = sum([1 for x in domains if len(x) > 2])
    hits_percent = 100 * hits_total / len(domains)
    p_cli(' %d hits (%d%%)\n\n' % (hits_total, hits_percent))

    for worker in threads:
        worker.stop()
        worker.join()

    if args.registered:
        domains[:] = [x for x in domains if len(x) > 2]

    if not args.all:
        for i in range(len(domains)):
            for k in ['dns-ns', 'dns-a', 'dns-aaaa', 'dns-mx']:
                if k in domains[i]:
                    domains[i][k] = domains[i][k][:1]

    if domains:
        if args.format == 'csv':
            print(create_csv(domains))
        elif args.format == 'json':
            print(create_json(domains))
        else:
            print(create_cli(domains))

    _exit(0)
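
The scanning section above (a jobs queue, daemon worker threads, and a polling loop that prints progress) follows a generic queue-plus-worker pattern. Below is a minimal, self-contained sketch of that pattern only, not dnstwist code; the Worker class and the sleep-based placeholder work are illustrative assumptions.

import queue
import threading
import time

class Worker(threading.Thread):
    """Illustrative worker: drains a job queue until told to stop."""

    def __init__(self, jobs):
        super().__init__(daemon=True)   # same effect as worker.daemon = True above
        self.jobs = jobs
        self._running = True

    def stop(self):
        self._running = False

    def run(self):
        while self._running:
            try:
                self.jobs.get(timeout=0.1)
            except queue.Empty:
                continue
            time.sleep(0.01)            # placeholder for real per-domain work
            self.jobs.task_done()

jobs = queue.Queue()
for i in range(100):
    jobs.put(i)

workers = [Worker(jobs) for _ in range(4)]
for w in workers:
    w.start()

while not jobs.empty():                 # poll and report, as the CLI loop above does
    time.sleep(0.1)

for w in workers:
    w.stop()
    w.join()
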
Example #22
def main():
	signal.signal(signal.SIGINT, sigint_handler)

	parser = argparse.ArgumentParser(
	usage='%s [OPTION]... DOMAIN' % sys.argv[0],
	add_help = True,
	description=
	'''Find similar-looking domain names that adversaries can use to attack you. '''
	'''Can detect typosquatters, phishing attacks, fraud and corporate espionage. '''
	'''Useful as an additional source of targeted threat intelligence.''',
	formatter_class=lambda prog: argparse.HelpFormatter(prog,max_help_position=30)
	)

	parser.add_argument('domain', help='domain name or URL to check')
	parser.add_argument('-a', '--all', action='store_true', help='show all DNS records')
	parser.add_argument('-b', '--banners', action='store_true', help='determine HTTP and SMTP service banners')
	parser.add_argument('-d', '--dictionary', type=str, metavar='FILE', help='generate additional domains using dictionary FILE')
	parser.add_argument('-g', '--geoip', action='store_true', help='perform lookup for GeoIP location')
	parser.add_argument('-m', '--mxcheck', action='store_true', help='check if MX host can be used to intercept e-mails')
	parser.add_argument('-f', '--format', type=str, choices=['cli', 'csv', 'json', 'idle'], default='cli', help='output format (default: cli)')
	parser.add_argument('-r', '--registered', action='store_true', help='show only registered domain names')
	parser.add_argument('-s', '--ssdeep', action='store_true', help='fetch web pages and compare their fuzzy hashes to evaluate similarity')
	parser.add_argument('-t', '--threads', type=int, metavar='NUMBER', default=THREAD_COUNT_DEFAULT, help='start specified NUMBER of threads (default: %d)' % THREAD_COUNT_DEFAULT)
	parser.add_argument('-w', '--whois', action='store_true', help='perform lookup for WHOIS creation/update time (slow)')
	parser.add_argument('--tld', type=str, metavar='FILE', help='generate additional domains by swapping TLD from FILE')
	parser.add_argument('--nameservers', type=str, metavar='LIST', help='comma separated list of DNS servers to query')
	parser.add_argument('--port', type=int, metavar='PORT', help='the port number to send queries to')
	parser.add_argument('--useragent', type=str, metavar='STRING', default='Mozilla/5.0 dnstwist/%s' % __version__, help='user-agent STRING to send with HTTP requests (default: Mozilla/5.0 dnstwist/%s)' % __version__)

	if len(sys.argv) < 2:
		sys.stdout.write('%sdnstwist %s by <%s>%s\n\n' % (ST_BRI, __version__, __email__, ST_RST))
		parser.print_help()
		bye(0)

	global args
	args = parser.parse_args()

	if args.threads < 1:
		args.threads = THREAD_COUNT_DEFAULT

	try:
		url = UrlParser(args.domain)
	except ValueError as err:
		p_err('error: %s\n' % err)
		bye(-1)

	dfuzz = DomainFuzz(url.domain)
	dfuzz.generate()
	domains = dfuzz.domains

	if args.dictionary:
		if not path.exists(args.dictionary):
			p_err('error: dictionary not found: %s\n' % args.dictionary)
			bye(-1)
		ddict = DomainDict(url.domain)
		ddict.load_dict(args.dictionary)
		ddict.generate()
		domains += ddict.domains

	if args.tld:
		if not path.exists(args.tld):
			p_err('error: dictionary not found: %s\n' % args.tld)
			bye(-1)
		tlddict = TldDict(url.domain)
		tlddict.load_dict(args.tld)
		tlddict.generate()
		domains += tlddict.domains

	if args.format == 'idle':
		sys.stdout.write(generate_idle(domains))
		bye(0)

	if not DB_TLD:
		p_err('error: missing TLD database file: %s\n' % FILE_TLD)
		bye(-1)
	if not DB_GEOIP and args.geoip:
		p_err('error: missing GeoIP database file: %s\n' % FILE_GEOIP)
		bye(-1)

	if not MODULE_DNSPYTHON:
		p_err('notice: missing module: dnspython (DNS features limited)\n')
	if not MODULE_GEOIP and args.geoip:
		p_err('notice: missing module: GeoIP (geographical location not available)\n')
	if not MODULE_WHOIS and args.whois:
		p_err('notice: missing module: whois (WHOIS database not accessible)\n')
	if not MODULE_SSDEEP and args.ssdeep:
		p_err('notice: missing module: ssdeep (fuzzy hashes not available)\n')
	if not MODULE_REQUESTS and args.ssdeep:
		p_err('notice: missing module: Requests (web page downloads not possible)\n')

	p_cli(FG_RND + ST_BRI +
'''     _           _            _     _
  __| |_ __  ___| |___      _(_)___| |_
 / _` | '_ \/ __| __\ \ /\ / / / __| __|
| (_| | | | \__ \ |_ \ V  V /| \__ \ |_
 \__,_|_| |_|___/\__| \_/\_/ |_|___/\__| {%s}

''' % __version__ + FG_RST + ST_RST)

	if MODULE_WHOIS and args.whois:
		p_cli('Disabling multithreaded job distribution in order to query WHOIS servers\n')
		args.threads = 1

	if args.ssdeep and MODULE_SSDEEP and MODULE_REQUESTS:
		p_cli('Fetching content from: ' + url.get_full_uri() + ' ... ')
		try:
			req = requests.get(url.get_full_uri(), timeout=REQUEST_TIMEOUT_HTTP, headers={'User-Agent': args.useragent})
		except requests.exceptions.ConnectionError:
			p_cli('Connection error\n')
			args.ssdeep = False
		except requests.exceptions.HTTPError:
			p_cli('Invalid HTTP response\n')
			args.ssdeep = False
		except requests.exceptions.Timeout:
			p_cli('Timeout (%d seconds)\n' % REQUEST_TIMEOUT_HTTP)
			args.ssdeep = False
		except Exception:
			p_cli('Failed!\n')
			args.ssdeep = False
		else:
			p_cli('%d %s (%.1f Kbytes)\n' % (req.status_code, req.reason, float(len(req.text))/1000))
			if req.status_code // 100 == 2:
				#ssdeep_orig = ssdeep.hash(req.text.replace(' ', '').replace('\n', ''))
				ssdeep_orig = ssdeep.hash(req.text)
			else:
				args.ssdeep = False

	p_cli('Processing %d domain variants ' % len(domains))

	jobs = queue.Queue()

	global threads
	threads = []

	for i in range(len(domains)):
		jobs.put(domains[i])

	for i in range(args.threads):
		worker = DomainThread(jobs)
		worker.daemon = True

		worker.uri_scheme = url.scheme
		worker.uri_path = url.path
		worker.uri_query = url.query

		worker.domain_orig = url.domain

		if MODULE_DNSPYTHON:
			worker.option_extdns = True
		if MODULE_WHOIS and args.whois:
			worker.option_whois = True
		if MODULE_GEOIP and DB_GEOIP and args.geoip:
			worker.option_geoip = True
		if args.banners:
			worker.option_banners = True
		if args.ssdeep and MODULE_REQUESTS and MODULE_SSDEEP and 'ssdeep_orig' in locals():
			worker.option_ssdeep = True
			worker.ssdeep_orig = ssdeep_orig
		if args.mxcheck:
			worker.option_mxcheck = True

		worker.start()
		threads.append(worker)

	qperc = 0
	while not jobs.empty():
		p_cli('.')
		qcurr = 100 * (len(domains) - jobs.qsize()) / len(domains)
		if qcurr - 15 >= qperc:
			qperc = qcurr
			p_cli('%u%%' % qperc)
		time.sleep(1)

	for worker in threads:
		worker.stop()
		worker.join()

	hits_total = sum('dns-ns' in d or 'dns-a' in d for d in domains)
	hits_percent = 100 * hits_total / len(domains)
	p_cli(' %d hits (%d%%)\n\n' % (hits_total, hits_percent))

	if args.registered:
		domains[:] = [d for d in domains if len(d) > 2]

	if domains:
		if args.format == 'csv':
			p_csv(generate_csv(domains))
		elif args.format == 'json':
			p_json(generate_json(domains))
		else:
			p_cli(generate_cli(domains))

	bye(0)
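
Both dnstwist variants above pass a lambda as formatter_class so that extra keyword arguments reach argparse.HelpFormatter (argparse only supplies prog when it builds the formatter). A minimal sketch of that trick in isolation follows; the option name and help text are made up for illustration.

import argparse

# Wrapping HelpFormatter in a lambda lets max_help_position/width be customized,
# since argparse instantiates formatter_class with only `prog`.
parser = argparse.ArgumentParser(
    description='formatter demo',
    formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=40, width=100))
parser.add_argument('--some-rather-long-option', action='store_true',
                    help='with a larger max_help_position this text stays on the same line')
parser.print_help()
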
Example #23
#!/usr/bin/env python
# Split TTree into a separate file.
# This file is part of https://github.com/hh-italian-group/AnalysisTools.

import sys
import os
import subprocess
import re
import argparse
import shutil
import ROOT

parser = argparse.ArgumentParser(
    description='Split tuple ROOT file.',
    formatter_class=lambda prog: argparse.HelpFormatter(prog, width=90))
parser.add_argument('--input',
                    required=True,
                    dest='input',
                    type=str,
                    metavar='FILE',
                    help="original ROOT file")
parser.add_argument('--output',
                    required=True,
                    dest='output',
                    type=str,
                    metavar='FILE',
                    help="output ROOT file")
parser.add_argument('--max-size',
                    required=False,
                    dest='max_size',
                    type=float,
Example #24
def cli():
    import argparse

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=35, width=100),
    )
    parser.add_argument(
        "-o",
        "--output",
        type=argparse.FileType("w"),
        metavar="file",
        help="write output to <file>",
    )
    parser.add_argument("-d",
                        "--db",
                        action="store_true",
                        help="write output to database")
    parser.add_argument(
        "-p",
        "--public",
        action="store_true",
        help="only include 'public' buckets in the output",
    )
    parser.add_argument(
        "-t",
        "--timeout",
        type=int,
        metavar="seconds",
        default=30,
        help="http request timeout in <seconds> (default: 30)",
    )
    parser.add_argument("-v",
                        "--version",
                        action="version",
                        version=f"%(prog)s {__version__}")
    parser.add_argument(
        "-c",
        "--concurrency",
        type=int,
        metavar="num",
        default=cpus,
        help=f"maximum <num> of concurrent requests (default: {cpus})",
    )
    # parser.add_argument("words", nargs="?", type=argparse.FileType("r"), default=stdin, help="list of words to permute")
    parser.add_argument(
        "word_list",
        nargs="+",
        type=argparse.FileType("r"),
        help="read words from one or more <word-list> files",
    )
    args = parser.parse_args()

    output = args.output
    db = args.db
    timeout = args.timeout
    concurrency = args.concurrency
    public = args.public
    words = {l.strip() for f in args.word_list for l in f}

    main(words=words,
         timeout=timeout,
         concurrency=max(1, concurrency),
         output=output,
         use_db=db,
         only_public=public)
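
The word set above is built with a nested set comprehension over every open word-list file, which both strips and deduplicates lines. A tiny stand-alone illustration of that idiom, using in-memory files instead of real word lists:

from io import StringIO

# Two fake word-list "files" with one overlapping entry.
word_list = [StringIO("alpha\nbeta\n"), StringIO("beta\ngamma\n")]
words = {line.strip() for f in word_list for line in f}
assert words == {"alpha", "beta", "gamma"}   # duplicates collapse, whitespace is stripped
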
Example #25
                instance = AtomShieldsScanner(self.path, verbose=self.verbose)
                instance.project = self.name
                instance.run()
            else:
                raise Exception(
                    "Invalid context. Allowed values: {values}".format(
                        values=str(AtomshieldsCli.CONTEXTS)))


if __name__ == "__main__":

    # Get Args
    parser = argparse.ArgumentParser(
        prog=AtomshieldsCli.COMMAND,
        add_help=True,
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=50, width=120),
        description=
        """Performs an action into a context. The actions are defines in the first argument, and the context in the second. You also can set options.""",
        epilog=
        """For more documentation, please visit https://github.com/ElevenPaths/AtomShields-cli\n\n"""
    )
    parser.add_argument(
        "action",
        action="store",
        default="show",
        help="Set the action to do. Allowed values are: {actions}".format(
            actions=', '.join(AtomshieldsCli.ACTIONS)))
    parser.add_argument(
        "context",
        nargs='?',
        action="store",
Example #26
    def _read_cmdline(self) -> argparse.Namespace:
        """Read in command line options using ArgumentParser.

        :returns: Result of parser.parse_args().
        """
        parser = argparse.ArgumentParser(
            description=VERSION + (", Single User Mode" if self._single else ", Multi-Player Server"),
                                         formatter_class=lambda prog: argparse.HelpFormatter(prog,
                                                                                             max_help_position=40))

        # These arguments are always available.
        parser.add_argument("--defaults-config", nargs=1, dest="defaultsfile", type=str, metavar="<filename>",
                            default=["defaults.config.json"], help="alternate defaults config file to use")
        parser.add_argument("--db", nargs=1, dest="db", type=str, metavar="<filename>",
                            help="database file to use")
        parser.add_argument("--backups", nargs=1, dest="backups", type=int, metavar="<number>",
                            help="maximum number of backups to keep")
        parser.add_argument("--log-file", nargs=1, dest="logfile", type=str, metavar="<filename>",
                            help="log file to use")
        parser.add_argument("--log-level", nargs=1, dest="loglevel", type=str, metavar="<level>",
                            help="log level to use")
        parser.add_argument("--help-columns", nargs=1, dest="helpcolumns", type=int, metavar="<number>",
                            help="number of columns to use for formatting help lists")

        # These arguments are available in Single User Mode.
        if self._single:
            parser.add_argument("--singleuser-config", nargs=1, dest="singleuserfile", type=str, metavar="<filename>",
                                default=["singleuser.config.json"], help="alternate singleuser config file to use")
            parser.add_argument("--wait-on-critical", nargs=1, dest="waitoncritical", type=str, metavar="<1|0>",
                                help="whether to wait before exiting on critical errors")
            parser.add_argument("--history-file", nargs=1, dest="historyfile", type=str, metavar="<filename>",
                                help="command prompt history file to use")

        # These arguments are available in Multi-Player Mode.
        else:
            parser.add_argument("--server-config", nargs=1, dest="serverfile", type=str, metavar="<filename>",
                                default=["server.config.json"], help="alternate server config file to use")
            parser.add_argument("--log-stdout", nargs=1, dest="stdout", type=str, metavar="<1|0>",
                                help="whether to log to stdout")
            parser.add_argument("--telnet-port", nargs=1, dest="telnetport", type=int, metavar="<port>",
                                help="telnet port to use, 0 to disable")
            parser.add_argument("--websocket-port", nargs=1, dest="websocketport", type=int, metavar="<port>",
                                help="websocket port to use, 0 to disable")
            parser.add_argument("--websocket-host", nargs=1, dest="websockethost", type=str, metavar="<hostname>",
                                help="websocket hostname to use")
            parser.add_argument("--websocket-secure", nargs=1, dest="websocketsecure", type=str, metavar="<1|0>",
                                help="whether to use secure websockets")
            parser.add_argument("--websocket-key", nargs=1, dest="websocketkey", type=str, metavar="<filename>",
                                help="websocket key file to use")
            parser.add_argument("--websocket-cert", nargs=1, dest="websocketcert", type=str, metavar="<filename>",
                                help="websocket certificate file to use")
            parser.add_argument("--shutdown-delay", nargs=1, dest="shutdowndelay", type=int, metavar="<seconds>",
                                help="shutdown delay in seconds")
            parser.add_argument("--disable-commands", nargs=1, dest="disablecommands", type=int,
                                metavar="<command,...>", help="comma separated list of commands to disable")

        # The version argument returns version and copyright info and exits.
        parser.add_argument("--version", action="store_true", dest="version", help="print the version string")

        # Finished.
        return parser.parse_args()
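
Every option in this parser is declared with nargs=1, so argparse stores each value as a one-element list; that is why the defaults are written as lists and why callers must index the result. A small sketch of that behavior, independent of the class above (the file names are invented):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--db", nargs=1, dest="db", type=str, metavar="<filename>",
                    default=["world.db"])

assert parser.parse_args(["--db", "other.db"]).db == ["other.db"]  # nargs=1 always yields a list
assert parser.parse_args([]).db == ["world.db"]                    # the list default is used as-is
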
Example #27
File: cmdline.py Project: Egida/Cloudmare
def parser_cmd(argv=None):
    logotype()
    formatter = lambda prog: argparse.HelpFormatter(prog,
                                                    max_help_position=100)
    parser = ArgumentParser(usage="Example: python " + Y + sys.argv[0] + W +
                            " -u site.com",
                            formatter_class=formatter)
    try:
        parser.add_argument("--hh",
                            "--help-hack",
                            dest="advancedHelp",
                            action="store_true",
                            help="Show advanced help message and exit")

        parser.add_argument("--version",
                            action='version',
                            version=NAME + VERSION + ' | ' + COPYRIGHT,
                            help="Show program's version number and exit")

        parser.add_argument(
            "-v",
            dest="verbose",
            action="store_true",
            help="Verbosity for sublist3r: True/False (default: False)")

        # Target options
        target = parser.add_argument_group(
            "Target",
            "At least one of these options has to be provided to define the target(s)"
        )

        target.add_argument(
            "-u",
            "--url",
            metavar="target",
            dest="domain",
            help=
            "Target URL as first argument (e.g. python Cloudmare.py site.com)")

        target.add_argument("--disable-sublister",
                            dest="disableSub",
                            action="store_true",
                            help="Disable subdomain listing for testing")

        target.add_argument(
            "--bruter",
            dest="brute",
            action="store_true",
            help="Bruteforcing target to find associated domains")

        target.add_argument(
            "--subbruter",
            dest="subbrute",
            action="store_true",
            help="Bruteforcing target's subdomains using subbrute module")

        # Request options
        request = parser.add_argument_group(
            "Request",
            "These options can be used to specify how to connect to the target URL"
        )

        request.add_argument("--user-agent",
                             dest="uagent",
                             help="Set HTTP User-Agent header value")

        request.add_argument(
            "--random-agent",
            dest="randomAgent",
            action="store_true",
            help="Set randomly selected HTTP User-Agent header value")

        request.add_argument("--host",
                             dest="host",
                             help="HTTP Host header value")

        request.add_argument(
            "--headers",
            dest="headers",
            help=
            "Set custom headers (e.g. \"Origin: originwebsite.com, ETag: 123\")"
        )

        request.add_argument("--ignore-redirects",
                             dest="ignoreRedirects",
                             action="store_false",
                             help="Ignore Redirection attempts")

        request.add_argument(
            "--threads",
            dest="threads",
            nargs="?",
            const=defaults.threads,
            type=int,
            help="Max number of concurrent HTTP(s) requests (default %d)" %
            defaults.threads)

        # Search options
        search = parser.add_argument_group(
            "Search", "These options can be used to perform advanced searches")

        search.add_argument("-sC",
                            "--search-censys",
                            dest="censys",
                            nargs="?",
                            const="data/APIs/api.conf",
                            type=str,
                            help="Perform search using Censys API")

        search.add_argument("-sSh",
                            "--search-shodan",
                            dest="shodan",
                            nargs="?",
                            const="data/APIs/api.conf",
                            type=str,
                            help="Perform search using Shodan API")

        search.add_argument("-sSt",
                            "--search-st",
                            dest="securitytrails",
                            nargs="?",
                            const="data/APIs/api.conf",
                            type=str,
                            help="Perform search using Securitytrails API")

        # Output options
        output = parser.add_argument_group(
            "Output",
            "These options can be used to save the subdomains results")

        output.add_argument(
            "-o",
            "--output",
            dest="outSub",
            action="store_true",
            help=
            "Save the subdomains into: \"data/output/subdomains-from-[domain].txt\""
        )

        output.add_argument(
            "--oG",
            "--output-good",
            dest="outSubG",
            action="store_true",
            help=
            "Save [good response] subdomains into: \"data/output/good-subdomains-from-[domain].txt\""
        )

        output.add_argument(
            "--oI",
            "--output-ip",
            dest="outSubIP",
            action="store_true",
            help=
            "Save subdomains IP into: \"data/output/good-subdomains-from-[domain].txt\""
        )

        advancedHelp = True
        argv = sys.argv[1:]
        checkOldOptions(argv)
        xrange = range if PYVERSION.startswith("3") else xrange

        for i in xrange(len(argv)):
            if argv[i] in ("-h", "--help"):
                advancedHelp = False
                for group in get_groups(parser)[:]:
                    found = False
                    for option in get_actions(group):
                        if option.dest not in BASIC_HELP:
                            option.help = SUPPRESS
                        else:
                            found = True
                    if not found:
                        get_groups(parser).remove(group)

        try:
            (args, _) = parser.parse_known_args(argv) if hasattr(
                parser, "parse_known_args") else parser.parse_args(argv)
        except UnicodeEncodeError as ex:
            print("\n %s%s\n" % (bad, ex))
            raise SystemExit
        except SystemExit:
            if ("-h" in argv or "--help" in argv) and not advancedHelp:
                print(
                    "\n" + info +
                    "to see full list of options run with '-hh' or '--help-hack'\n"
                )
            raise

        if not args.domain:
            errMsg = "missing a mandatory option (-u, --url). Use -h for basic and -hh for advanced help\n"
            parser.error(errMsg)

        return parser.parse_args(), parser.error
    except (ArgumentError, TypeError) as ex:
        parser.error(str(ex))
    debugMsg = "parsing command line"
    logging.debug(debugMsg)
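
Several Cloudmare options above (--threads, -sC, -sSh, -sSt) use nargs='?' together with const, so a flag can be given with or without a value. A minimal illustration of the three resulting cases; the numbers are arbitrary:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--threads", dest="threads", nargs="?", const=10, type=int)

assert parser.parse_args([]).threads is None              # flag omitted -> default (None)
assert parser.parse_args(["--threads"]).threads == 10     # flag without a value -> const
assert parser.parse_args(["--threads", "4"]).threads == 4 # flag with a value -> converted value
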
Example #28
File: h5view.py Project: avigna/COMPAS-1
def main():

    # setup argument parser
    formatter = lambda prog: argparse.HelpFormatter(
        prog, max_help_position=4, width=90)
    parser = argparse.ArgumentParser(description='HDF5 file content viewer.',
                                     formatter_class=formatter)

    # define arguments
    parser.add_argument('inputPaths',
                        metavar='input',
                        type=str,
                        nargs='+',
                        help='input directory and/or file name(s)')
    parser.add_argument('-f',
                        '--filter',
                        dest='filename_filter',
                        type=str,
                        action='store',
                        default='*',
                        help='input filename filter (default = *)')
    parser.add_argument('-r',
                        '--recursive',
                        dest='recursion_depth',
                        type=int,
                        nargs='?',
                        action='store',
                        default=0,
                        const=sys.maxsize,
                        help='recursion depth (default is no recursion)')
    parser.add_argument(
        '-S',
        '--summary',
        dest='summary',
        action='store_true',
        default=False,
        help=
        'display summary output for HDF5 file (default is not to display summary)'
    )
    parser.add_argument(
        '-H',
        '--headers',
        dest='headers',
        action='store_true',
        default=False,
        help=
        'display file headers for HDF5 file (default is not to display headers)'
    )
    parser.add_argument(
        '-C',
        '--contents',
        dest='contents',
        type=int,
        nargs='?',
        action='store',
        default=0,
        const=sys.maxsize,
        help=
        'display file contents for HDF5 file: argument is number of entries (+ve from top, -ve from bottom) (default is not to display contents)'
    )
    parser.add_argument(
        '-s',
        '--stop-on-error',
        dest='stop_on_error',
        action='store_true',
        default=False,
        help=
        'stop all copying if an error occurs (default is skip to next file and continue)'
    )
    parser.add_argument(
        '-x',
        '--exclude',
        dest='exclude_group',
        type=str,
        nargs='+',
        action='store',
        default='',
        help=
        'list of input groups to be excluded (default is all groups will be copied)'
    )
    parser.add_argument(
        '-V',
        '--seeds',
        dest='seed_list',
        type=int,
        nargs='+',
        action='store',
        default=[],
        help=
        'list of seeds to be printed (for content printing) (default is print all seeds)'
    )

    # parse arguments
    args = parser.parse_args()

    fileFilter = args.filename_filter + '.h5'  # add file extension to filter

    excludeList = ' '.join(
        args.exclude_group)  # construct exclude list for groups

    if not args.summary and not args.headers and not args.contents:
        args.summary = True  # summary is default if nothing else specified

    # process input files and directories
    for thisPath in args.inputPaths:  # for each input path
        ok = True  # assume success so the stop-on-error check below is safe for skipped paths
        thisFullPath = os.path.abspath(thisPath)  # fully-qualified filename
        if os.path.exists(thisFullPath):  # path exists?
            if os.path.isfile(thisFullPath):  # yes - is it a file?
                if fnmatch(thisPath,
                           fileFilter):  # yes - filename matches filter?
                    ok = viewHDF5File(thisFullPath,
                                      excludeList=excludeList,
                                      summary=args.summary,
                                      headers=args.headers,
                                      count=args.contents,
                                      seeds=args.seed_list)  # yes - view it
                else:  # no - does not match filter
                    print('Warning:', thisPath, 'does not match file filter (',
                          fileFilter, '): ignored')  # show warning
            elif os.path.isdir(thisFullPath):  # not a file - directory?
                # yes - process directory
                ok = processDirectory(thisFullPath,
                                      recursive=args.recursion_depth,
                                      fileFilter=fileFilter,
                                      stopOnError=args.stop_on_error,
                                      excludeList=excludeList,
                                      summary=args.summary,
                                      headers=args.headers,
                                      count=args.contents,
                                      seeds=args.seed_list)
            else:  # not a file or directory
                print('Warning:', thisFullPath,
                      'is not a file or a directory: ignored')  # show warning
        else:  # path does not exist
            print('Warning:', thisFullPath,
                  'does not exist: ignored')  # show warning

        if args.stop_on_error and not ok:  # error occurred, and stop-on-error specified?
            print('Error encountered: view stopped')  # yes - announce error
            break  # and stop
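
The path-handling loop above matches candidate file names against a '<filter>.h5' pattern with fnmatch. A compact sketch of just that filtering step; the paths are invented:

from fnmatch import fnmatch

file_filter = '*' + '.h5'   # same construction as fileFilter above
candidates = ['run1.h5', 'notes.txt', 'data/run2.h5']
matches = [p for p in candidates if fnmatch(p, file_filter)]
assert matches == ['run1.h5', 'data/run2.h5']
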
Example #29
if __name__ == '__main__':

    def signal_handler(sig, frame):
        sys.exit(0)


    def json_data(arg_string):
        try:
            return json.loads(arg_string)
        except Exception as e:
            raise argparse.ArgumentTypeError('{} is not JSON'.format(arg_string))


    parser = argparse.ArgumentParser(description='API fuzzer configuration',
                                     formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=20))
    parser.add_argument('-s', '--src_file',
                        type=str,
                        required=False,
                        help='API definition file path. Currently only JSON format is supported',
                        dest='src_file')
    parser.add_argument('--src_url',
                        type=str,
                        required=False,
                        help='API definition url. Currently only JSON format is supported',
                        dest='src_url')
    parser.add_argument('-r', '--report_dir',
                        type=str,
                        required=False,
                        help='Directory where error reports will be saved. Default is temporally generated directory',
                        dest='report_dir',
Example #30
def make_argument_parser() -> argparse.ArgumentParser:
    '''Generic experiment parser.

    Generic parser takes the experiment yaml as the main argument, but has some
    options for reloading, etc. This parser can be easily extended using a
    wrapper method.

    Returns:
        argparse.parser

    '''
    parser = argparse.ArgumentParser(
        formatter_class=lambda prog: argparse.HelpFormatter(
            prog, max_help_position=50, width=100))
    parser.add_argument(
        '-o',
        '--out_path',
        default=None,
        help=('Output path directory. All model results will go'
              ' here. If a new directory, a new one will be '
              'created, as long as parent exists.'))
    parser.add_argument(
        '-n',
        '--name',
        default=None,
        help=('Name of the experiment. If given, base name of '
              'output directory will be `--name`. If not given,'
              ' name will be the base name of the `--out_path`'))
    parser.add_argument('-r',
                        '--reload',
                        type=str,
                        default=None,
                        help=('Path to model to reload.'))
    parser.add_argument('-a',
                        '--autoreload',
                        default=False,
                        action='store_true')
    parser.add_argument('-R',
                        '--networks_to_reload',
                        type=str,
                        nargs='+',
                        default=None)
    parser.add_argument('-L',
                        '--load_networks',
                        type=str,
                        default=None,
                        help=('Path to model to reload. Does not load args,'
                              ' info, etc'))
    parser.add_argument('-m', '--meta', type=str, default=None)
    parser.add_argument('-c',
                        '--config_file',
                        default=None,
                        help=('Configuration yaml file. '
                              'See `exps/` for examples'))
    parser.add_argument('-k',
                        '--clean',
                        action='store_true',
                        default=False,
                        help=('Cleans the output directory. '
                              'This cannot be undone!'))
    parser.add_argument('-v',
                        '--verbosity',
                        type=int,
                        default=1,
                        help='Verbosity of the logging. (0, 1, 2)')
    parser.add_argument('-d', '--device', type=int, default=0)
    return parser
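
The docstring notes that this generic parser is meant to be extended through a wrapper method. One possible sketch of such a wrapper, reusing make_argument_parser() from above; the extra --seed option is a hypothetical addition, not part of the original code:

def make_experiment_parser() -> argparse.ArgumentParser:
    # Hypothetical wrapper: reuse the generic parser and add one experiment-specific flag.
    parser = make_argument_parser()
    parser.add_argument('--seed', type=int, default=0,
                        help='Random seed for the experiment (illustrative addition).')
    return parser

args = make_experiment_parser().parse_args(['-o', '/tmp/out', '--seed', '42'])
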