Example #1
def get_args():
    parser = ArgumentParser(
        description="Given a directory containing bulk fast5 files, output a CSV "
                    "containing the run information for them",
        formatter_class=ArgumentDefaultsHelpFormatter,
        add_help=False)
    general = parser.add_argument_group(title='General options')
    general.add_argument("-h",
                         "--help",
                         action="help",
                         help="Show this help and exit")
    in_args = parser.add_argument_group(title='Input sources')
    in_args.add_argument("-d",
                         "--dir",
                         help="A directory containing bulk-fast5-files",
                         type=str,
                         required=True,
                         metavar='')
    out_args = parser.add_argument_group(title='Output sources')
    out_args.add_argument("-o",
                          "--out",
                          help="Output csv filename",
                          type=str,
                          default='bulk_info.csv',
                          required=True,
                          metavar='')
    return parser.parse_args()
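
The snippet above leans on argparse's add_argument_group() to split the --help listing into titled sections; passing add_help=False and re-adding -h/--help inside a group is what moves the help flag out of the default options section. A minimal, self-contained sketch of the same idea (group titles and option names here are illustrative, not taken from the example):

import argparse

parser = argparse.ArgumentParser(description="Demo of titled option groups",
                                 add_help=False)

general = parser.add_argument_group(title="General options")
general.add_argument("-h", "--help", action="help",
                     help="Show this help and exit")

inputs = parser.add_argument_group(title="Input sources")
inputs.add_argument("-d", "--dir", required=True, metavar="DIR",
                    help="A directory to read from")

args = parser.parse_args(["-d", "/tmp"])
print(args.dir)  # /tmp; run the script with -h to see the grouped help text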
Example #2
def get_arguments():
    parser = MyParser(description='Basecall reads in real-time with Guppy',
                      formatter_class=MyHelpFormatter, add_help=False)

    required = parser.add_argument_group('Required')
    required.add_argument('-i', '--in_dir', type=pathlib.Path, required=True,
                          help='Input directory (will be searched recursively for fast5s)')
    required.add_argument('-o', '--out_dir', type=pathlib.Path, required=True,
                          help='Output directory')
    required.add_argument('--barcodes', type=str, required=True,
                          help='Which barcodes to use ({})'.format(join_with_or(BARCODING)))
    required.add_argument('--model', type=str, required=True,
                          help='Which basecalling model to use '
                               '({})'.format(join_with_or(BASECALLING)))

    options = parser.add_argument_group('Options')
    options.add_argument('--batch_size', type=int, required=False, default=10,
                         help='Number of fast5 files to basecall per batch')
    options.add_argument('--stop_time', type=int, required=False, default=60,
                         help="Automatically stop when a new fast5 file hasn't been seen for this "
                              "many minutes")
    options.add_argument('--cpu', action='store_true',
                         help='Use the CPU for basecalling (default: use the GPU)')
    options.add_argument('--trans_window', type=int, required=False, default=60,
                         help='The time window size (in minutes) for the translocation speed '
                              'summary')
    options.add_argument('-h', '--help', action='help',
                         help='Show this help message and exit')

    args = parser.parse_args()
    check_arguments(args)
    return args
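
check_arguments() is not shown in this example. A common way to do that kind of post-parse validation is to call parser.error() once the Namespace is available; the sketch below is only a guess at what such a check might look like (it passes the parser in explicitly, unlike the original call, and the specific constraints are assumptions):

import argparse
import pathlib

def check_arguments(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None:
    """Validate parsed arguments and exit with a usage message on failure."""
    # Hypothetical checks; the real check_arguments() may differ.
    if args.batch_size < 1:
        parser.error("--batch_size must be a positive integer")
    if not args.in_dir.is_dir():
        parser.error(f"input directory does not exist: {args.in_dir}")

parser = argparse.ArgumentParser()
parser.add_argument("-i", "--in_dir", type=pathlib.Path, required=True)
parser.add_argument("--batch_size", type=int, default=10)

args = parser.parse_args(["-i", "."])
check_arguments(parser, args)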
Example #3
def parse_args():
    parser = ArgumentParser(usage="%(prog)s [options] <url> <site_id>")

    # Database options
    group = parser.add_argument_group('Database options')
    group.add_argument('-u', '--user', dest='db_user',
                       help='Database user name',
                       default='root')
    group.add_argument('-p', '--password', dest='db_password',
                       help='Database user password',
                       default='')
    group.add_argument('-d', dest='db_name', required=True,
                       help='Name of the database where data will be stored')
    group.add_argument('--host', dest='db_hostname',
                       help='Name of the host where the database server is running',
                       default='localhost')
    group.add_argument('--port', dest='db_port',
                       help='Port of the host where the database server is running',
                       default='3306')

    # Piwik options
    group = parser.add_argument_group('Piwik options')
    group.add_argument('--start-date', dest='start_date', required=True)
    group.add_argument('--end-date', dest='end_date', default='today')
    group.add_argument('--key', dest='key', required=True,
                       help='Piwik auth key')

    # Positional arguments
    parser.add_argument('url', help='Piwik server URL')
    parser.add_argument('site_id', help='Identifier of the site')

    # Parse arguments
    args = parser.parse_args()

    return args
Example #4
	def parse_cl_args(self):
		parser = argparse.ArgumentParser(description = 'Sync one or more directories with your Picasa Web account. If only one directory is given and it doesn\'t contain any supported file, it is assumed to be the parent of all the local albums.')
		parser.add_argument('-n', '--dry-run', dest = 'dry_run', action = 'store_true', help = 'Do everything except creating or deleting albums and photos')
		parser.add_argument('-D', '--debug', dest = 'debug', action = 'store_true', help = 'Debug Picasa API usage')
		parser.add_argument('-v', '--verbose', dest = 'verbose', action = 'count', help = 'Verbose output (can be given more than once)')
		parser.add_argument('-m', '--max-photos', metavar = 'NUMBER', dest = 'max_photos', type = int, default = self.MAX_PHOTOS_PER_ALBUM, help = 'Maximum number of photos in album (limited to %s)' % self.MAX_PHOTOS_PER_ALBUM)
		parser.add_argument('-u', '--upload', dest = 'upload', action = 'store_true', help = 'Upload missing remote photos')
		parser.add_argument('-d', '--download', dest = 'download', action = 'store_true', help = 'Download missing local photos')
		parser.add_argument('-r', '--update', dest = 'update', action = 'store_true', help = 'Update changed local or remote photos')
		parser.add_argument('-t', '--threads', dest = 'threads', type = int, nargs = '?', const = self.ncores, default = 1, help = 'Multithreaded operation. Set number of threads to use on album processing. If not given defaults to 1, if given without argument, defaults to number of CPU cores ({} in this system).'.format(self.ncores))
		parser.add_argument('-o', '--origin', dest = 'origin', metavar = 'ORIGINS', type = ListParser(choices = ('filename', 'exif', 'stat')), default = ['exif', 'stat'], help = 'Timestamp origin. ORIGINS is a comma separated list of values "filename", "exif" or "stat" which will be probed in order. Default is "exif,stat".')
		group = parser.add_argument_group('DANGEROUS', 'Dangerous options that should be used with care')
		group.add_argument('--max-size', dest = 'max_size', type = ListParser(unique = False, type = int, nargs = 2), default = self.MAX_PHOTO_SIZE, help = 'Maximum size of photo when using --transform=resize. Default is {},{}.'.format(*self.MAX_PHOTO_SIZE))
		group.add_argument('--force-update', dest = 'force_update', choices = ('full', 'metadata'), nargs = '?', const = 'full', help = 'Force updating photos regardless of modified status (Assumes --update). If no argument given, it assumes full.')
		group.add_argument('--delete-photos', dest = 'delete_photos', action = 'store_true', help = 'Delete remote or local photos not present on the other album')
		group.add_argument('--strip-exif', dest = 'strip_exif', action = 'store_true', help = 'Strip EXIF data from your photos on upload.')
		group.add_argument('--transform', dest = 'transform', metavar = 'TRANSFORMS', type = ListParser(choices = ('raw', 'rotate', 'resize')), help = 'Transform the local files before uploading them. TRANSFORMS is a list of transformations to apply, from "raw", "rotate" and "resize".')
		group = parser.add_argument_group('VERY DANGEROUS', 'Very dangerous options that should be used with extreme care')
		group.add_argument('--delete-albums', dest = 'delete_albums', action = 'store_true', help = 'Delete remote or local albums not present on the other system')
		parser.add_argument('paths', metavar = 'PATH', nargs = '+', help = 'Parent directory of the albums to sync')
		cl_args = parser.parse_args()

		if cl_args.verbose == 1:
			log_level = logging.INFO
		elif cl_args.verbose >= 2:
			log_level = logging.DEBUG
		else:
			log_level = logging.WARNING

		logging.basicConfig(level = log_level, format = '%(asctime)s %(levelname)s [%(thread)x] %(name)s %(message)s')
		sys.stdout = StreamLogger(sys.stdout, '[stdout] ')

		if cl_args.max_photos > self.MAX_PHOTOS_PER_ALBUM:
			self.LOG.warn('Maximum number of photos in album is bigger than the Picasa limit ({}), using this number as limit'.format(self.MAX_PHOTOS_PER_ALBUM))
			cl_args.max_photos = self.MAX_PHOTOS_PER_ALBUM

		if not cl_args.upload and not cl_args.download:
			self.LOG.info('No upload or download specified. Using bidirectional sync.')
			cl_args.upload = True
			cl_args.download = True

		if (cl_args.delete_photos or cl_args.delete_albums) and cl_args.upload and cl_args.download:
			self.LOG.warn('You cannot delete when using bidirectional syncing. Disabling deletion.')
			cl_args.delete_photos = False
			cl_args.delete_albums = False

		if cl_args.force_update and cl_args.upload and cl_args.download:
			self.LOG.warn('You cannot force update when using bidirectional syncing. Disabling forced updates.')
			cl_args.force_update = False

		if cl_args.force_update and not cl_args.update:
			cl_args.update = True

		if len(cl_args.paths) > 1 and (cl_args.download or cl_args.delete_albums):
			self.LOG.warn('You cannot download or delete albums when using more than one directory. Disabling download and/or album deletion.')
			cl_args.download = False
			cl_args.delete_albums = False

		self.cl_args = cl_args
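
ListParser used above is a helper class from the project, not part of argparse, so its implementation is not shown here. The usual way to get "comma separated list with restricted values" behaviour with plain argparse is a callable passed as type=; the stand-in below is an assumption about that behaviour, not the project's code:

import argparse

def choice_list(*choices):
    """Return a type= callable that splits on commas and validates each item."""
    def parse(value):
        items = [item.strip() for item in value.split(",") if item.strip()]
        bad = [item for item in items if item not in choices]
        if bad:
            raise argparse.ArgumentTypeError(
                "invalid value(s) %s (choose from %s)"
                % (", ".join(bad), ", ".join(choices)))
        return items
    return parse

parser = argparse.ArgumentParser()
parser.add_argument("-o", "--origin", metavar="ORIGINS",
                    type=choice_list("filename", "exif", "stat"),
                    default=["exif", "stat"],
                    help='Comma separated list of "filename", "exif" or "stat".')

print(parser.parse_args(["-o", "exif,stat"]).origin)  # ['exif', 'stat']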
Example #5
def parse_arguments():
    """Parses command line arguments.

    Returns:
      An object containing parsed arguments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--log_level',
                        default='INFO',
                        help='The logging threshold.')

    # Data generation arguments
    data_group = parser.add_argument_group('data',
                                           'Data generation arguments.')
    data_group.add_argument(
        '--input_file',
        required=True,
        help='The input file containing data generation options.')
    data_group.add_argument(
        '--topic_id',
        type=int,
        help='Override the topic ID to use during data generation.')
    data_group.add_argument(
        '--topic_name',
        help='Override the topic name to use during data generation.')
    data_group.add_argument(
        '--sample_rate',
        type=float,
        help='Override the sample rate to use during data generation.')
    data_group.add_argument(
        '--spread',
        type=float,
        help='Override the spread to use during data generation.')

    # Database connectivity arguments
    db_group = parser.add_argument_group('database',
                                         'Database connectivity arguments.')
    db_group.add_argument('--db_type',
                          choices=['mysql+mysqlconnector', 'sqlite'],
                          default='sqlite',
                          help='Which database type should be used.')
    db_group.add_argument('--db_user',
                          default='uwsolar',
                          help='The database user.')
    db_group.add_argument('--db_password',
                          default='',
                          help='The database password.')
    db_group.add_argument('--db_host',
                          default=':memory:',
                          help='The database host.')
    db_group.add_argument('--db_name',
                          default='uwsolar',
                          help='The database name.')

    return parser.parse_args()
Example #6
def parse_args():
    description = f"""
This utility updates existing triage tickets with signatures.
Signatures perform automatic analysis of ticket log files to extract
information that is crucial to help kickstart a ticket triage. Each
signature typically outputs its information in the form of a ticket
comment.


Before running this please make sure you have -

1. A jira username and password -
    Username: You can find it in {JIRA_SERVER}/secure/ViewProfile.jspa
    Password: Simply your sso.redhat.com password

2. A RedHat VPN connection - this is required to access ticket log files

3. A Python virtualenv with all the requirements.txt installed
   (you can also install them without a virtualenv if you wish).

You can run this script without affecting the tickets by using the --dry-run flag
""".strip()

    signature_names = [s.__name__ for s in SIGNATURES]
    parser = argparse.ArgumentParser(description=description, formatter_class=argparse.RawDescriptionHelpFormatter)

    login_group = parser.add_argument_group(title="Login options")
    login_args = login_group.add_mutually_exclusive_group()
    login_args.add_argument("--netrc", default="~/.netrc", required=False, help="netrc file")
    login_args.add_argument("-up", "--user-password", required=False, help="Username and password in the format of user:pass")

    selectors_group = parser.add_argument_group(title="Issues selection")
    selectors = selectors_group.add_mutually_exclusive_group(required=True)
    selectors.add_argument("-r", "--recent-issues", action='store_true', help="Handle recent (30 days) Triaging Tickets")
    selectors.add_argument("-a", "--all-issues", action='store_true', help="Handle all Triaging Tickets")
    selectors.add_argument("-i", "--issue", required=False, help="Triage issue key")

    parser.add_argument("-u", "--update", action="store_true", help="Update ticket even if signature already exist")
    parser.add_argument("-v", "--verbose", action="store_true", help="Output verbose logging")

    dry_run_group = parser.add_argument_group(title="Dry run options")
    dry_run_args = dry_run_group.add_mutually_exclusive_group()
    dry_run_msg = "Dry run. Don't update tickets. Write output to"
    dry_run_args.add_argument("-d", "--dry-run", action="store_true", help=f"{dry_run_msg} stdout")
    dry_run_args.add_argument("-t", "--dry-run-temp", action="store_true", help=f"{dry_run_msg} a temp file")

    parser.add_argument("-us", "--update-signature", action='append', choices=signature_names,
                        help="Update tickets with only the signatures specified")

    args = parser.parse_args()

    config_logger(args.verbose)

    return args
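
This example nests add_mutually_exclusive_group() inside titled argument groups, which keeps related options under one heading in --help while still enforcing that only one of them may be given. A stripped-down sketch of that pattern (option names are illustrative):

import argparse

parser = argparse.ArgumentParser()
selection = parser.add_argument_group(title="Issues selection")
one_of = selection.add_mutually_exclusive_group(required=True)
one_of.add_argument("-r", "--recent-issues", action="store_true",
                    help="Handle recent issues")
one_of.add_argument("-a", "--all-issues", action="store_true",
                    help="Handle all issues")
one_of.add_argument("-i", "--issue", help="A single issue key")

print(parser.parse_args(["-i", "ABC-123"]).issue)  # ABC-123
# Passing both -r and -a exits with a "not allowed with argument" error.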
Example #7
def prep_argparse():
    '''
    (None) -> Namespace
    Example command line arguments for argparse. Note that argparse provides
    a default -h | --help option.
    See https://docs.python.org/2/howto/argparse.html for more.
    '''
    parser = Parser(description='A script to remove inactive users from ' +
                                'your Twitter following list. ' +
                                'Requirements: Set up your API access ' +
                                'tokens by following the instructions at ' +
                                'https://dev.twitter.com/oauth/overview/' +
                                'application-owner-access-tokens. ' +
                                'Any dependencies that your Python ' +
                                'installation says it requires when you ' +
                                'try running this script are best installed ' +
                                'with pip, eg; sudo pip install twitter')
    # optional arguments to specify cut off date for accounts
    parser.add_argument('-y', '--years', help='specify idle account cutoff ' +
                        'in years')
    parser.add_argument('-m', '--months', help='specify idle account cutoff ' +
                        'in months')
    # twitter API tokens are collected in an argument group
    group = parser.add_argument_group()
    group.add_argument('-t', '--token',
                       help='twitter API access token', required=True)
    group.add_argument('-tk', '--tokenkey',
                       help='twitter API token secret', required=True)
    group.add_argument('-ck', '--conkey',
                       help='twitter API consumer key', required=True)
    group.add_argument('-cs', '--consecret',
                       help='twitter API consumer secret', required=True)
    args = parser.parse_args()
    return args
Example #8
def parse_args():
    parser = argparse.ArgumentParser()

    # Options
    group = parser.add_argument_group('General options')
    group.add_argument('-u',
                       '--user',
                       dest='user',
                       default='root',
                       help='Database user')
    group.add_argument('-p',
                       '--password',
                       dest='password',
                       default='',
                       help='Database password')
    group.add_argument('-d',
                       '--database',
                       dest='database',
                       help='Database name')
    group.add_argument('--host',
                       dest='host',
                       default='localhost',
                       help='Database host')
    group.add_argument('--port',
                       dest='port',
                       default='3306',
                       help='Database host port')
    group.add_argument('--clear',
                       action='store_true',
                       help='Delete database contents')

    parser.add_argument('logdir',
                        help='Directory where Apache logs are stored')

    return parser.parse_args()
Example #9
def parse_args():
    parser = ArgumentParser(usage="%(prog)s [options] <url> <site_id>")

    # Database options
    group = parser.add_argument_group('Database options')
    group.add_argument('-u',
                       '--user',
                       dest='db_user',
                       help='Database user name',
                       default='root')
    group.add_argument('-p',
                       '--password',
                       dest='db_password',
                       help='Database user password',
                       default='')
    group.add_argument('-d',
                       dest='db_name',
                       required=True,
                       help='Name of the database where data will be stored')
    group.add_argument(
        '--host',
        dest='db_hostname',
        help='Name of the host where the database server is running',
        default='localhost')
    group.add_argument(
        '--port',
        dest='db_port',
        help='Port of the host where the database server is running',
        default='3306')

    # Piwik options
    group = parser.add_argument_group('Piwik options')
    group.add_argument('--start-date', dest='start_date', required=True)
    group.add_argument('--end-date', dest='end_date', default='today')
    group.add_argument('--key',
                       dest='key',
                       required=True,
                       help='Piwik auth key')

    # Positional arguments
    parser.add_argument('url', help='Piwik server URL')
    parser.add_argument('site_id', help='Identifier of the site')

    # Parse arguments
    args = parser.parse_args()

    return args
Example #10
def main(argv):
    """Generate graphs based on commandline options."""
    def date_from_string(date_string):
        return dateutil.parser.parse(date_string).date()

    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--version", help="print the program's version", action='version', version=format_version(sys.argv[0]))
    parser.add_argument("-t", "--trace", help="Turn on debug tracing", type=int, default=0)
    parser.add_argument("-S", "--save", help="Save graphs to images files.", action="store_true", default=False)
    # stat types to operate on
    stats_group = parser.add_argument_group('Statistics', 'Graph statistics over a period of time')
    stats_group.add_argument("-A", "--all", help="Graph data for all enabled statistics.", action='store_const', dest='stats', const=gc_config.enabled_stats(), default=[])
    stats_group.add_argument("-m", "--monitoring", help="Graph monitoring data.", dest='stats', action='append_const', const=Statistics.monitoring)
    stats_group.add_argument("-r", "--hr", help="Graph heart rate data.", dest='stats', action='append_const', const=Statistics.rhr)
    stats_group.add_argument("-s", "--steps", help="Graph steps data.", dest='stats', action='append_const', const=Statistics.sleep)
    stats_group.add_argument("-w", "--weight", help="Graph weight data.", dest='stats', action='append_const', const=Statistics.weight)
    stats_group.add_argument("-p", "--period", help="Graph the latest data.", dest='period', type=int, default=None)
    daily_group = parser.add_argument_group('Daily')
    daily_group.add_argument("-d", "--day", help="Graph composite data for a single day.", type=date_from_string)
    modifiers_group = parser.add_argument_group('Modifiers')
    modifiers_group.add_argument("-l", "--latest", help="Graph the latest data.", dest='days', type=int, default=None)
    args = parser.parse_args()

    if args.trace > 0:
        root_logger.setLevel(logging.DEBUG)
    else:
        root_logger.setLevel(logging.INFO)

    graph = Graph(args.trace, args.save)

    if Statistics.rhr in args.stats:
        graph.graph_activity('hr', args.period, args.days)

    if Statistics.itime in args.stats:
        graph.graph_activity('itime', args.period, args.days)

    if Statistics.steps in args.stats:
        graph.graph_activity('steps', args.period, args.days)

    if Statistics.weight in args.stats:
        graph.graph_activity('weight', args.period, args.days)

    if args.day:
        graph.graph_date(args.day)
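
The statistics flags above all share dest='stats': -A stores a prebuilt list with store_const, while the individual flags use append_const to add one constant each to the same list. A self-contained sketch of that accumulation pattern (plain strings stand in for the Statistics enum, which is not shown in the example):

import argparse

parser = argparse.ArgumentParser()
stats = parser.add_argument_group("Statistics")
stats.add_argument("-A", "--all", dest="stats", action="store_const",
                   const=["hr", "steps", "weight"], default=[],
                   help="Select every statistic.")
stats.add_argument("-r", "--hr", dest="stats", action="append_const", const="hr")
stats.add_argument("-s", "--steps", dest="stats", action="append_const", const="steps")
stats.add_argument("-w", "--weight", dest="stats", action="append_const", const="weight")

print(parser.parse_args(["-r", "-w"]).stats)  # ['hr', 'weight']
print(parser.parse_args(["-A"]).stats)        # ['hr', 'steps', 'weight']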
Example #11
def main(argv):
    help_text = f"""Check in to SWA flights exactly 24 hours in advance. After check-in, an 
email is sent with an itinerary summary and URLs for all boarding passes in a reservation. 
An error email is sent if check-in fails. Reservations with multiple passengers will have 
all passengers checked in and included in the itinerary summary email. The name of any 
passenger in the reservation will work. For roundtrip reservations, all outbound and 
return flights will be queued for check-in. 
SMTP configuration path - {path.dirname(path.realpath(__file__))}/smtp.yml"""

    parser = argparse.ArgumentParser(description=help_text)
    rn = parser.add_argument_group('required named arguments')
    rn.add_argument('-c', '--confirmation', help='Flight confirmation number (record locator)', required=True)
    rn.add_argument('-l', '--lastname', help='Last name', required=True)
    rn.add_argument('-f', '--firstname', help='First name', required=True)
    parser.add_argument('-e', '--email', help='Recipient email address')
    return vars(parser.parse_args())
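
Returning vars(parser.parse_args()) hands back an ordinary dict instead of a Namespace, which is convenient when the caller wants **kwargs expansion or .get() lookups. A short illustration (the argument values are made up):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-c", "--confirmation", required=True)
parser.add_argument("-e", "--email")

args = vars(parser.parse_args(["-c", "ABC123"]))
print(args)               # {'confirmation': 'ABC123', 'email': None}
print(args.get("email"))  # None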
Example #12
    def create_argument_parser(cls):
        """Returns the ReMo argument parser."""

        parser = super().create_argument_parser()

        # Remove --from-date argument from parent parser
        # because it is not needed by this backend
        action = parser._option_string_actions['--from-date']
        parser._handle_conflict_resolve(None, [('--from-date', action)])


        # ReMo options
        group = parser.add_argument_group('ReMo arguments')

        group.add_argument("url", default="https://reps.mozilla.org", nargs='?',
                           help="ReMo URL (default: https://reps.mozilla.org)")

        return parser
Example #13
File: remo.py  Project: willingc/perceval
    def create_argument_parser(cls):
        """Returns the ReMo argument parser."""

        parser = super().create_argument_parser()

        # Remove --from-date argument from parent parser
        # because it is not needed by this backend
        action = parser._option_string_actions['--from-date']
        parser._handle_conflict_resolve(None, [('--from-date', action)])

        # ReMo options
        group = parser.add_argument_group('ReMo arguments')

        group.add_argument("url",
                           default="https://reps.mozilla.org",
                           nargs='?',
                           help="ReMo URL (default: https://reps.mozilla.org)")

        return parser
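
Examples #12 and #13 strip an argument that was registered by the parent class's parser using two private argparse attributes (_option_string_actions and _handle_conflict_resolve). That works, but it relies on internals that are not part of the documented API and could change between Python versions. A standalone sketch of the same removal, for reference only:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--from-date", help="inherited option we want to drop")
parser.add_argument("--url", help="option we want to keep")

# Look up the action registered for --from-date and resolve it away,
# mirroring what the ReMo backend does with its inherited parser.
action = parser._option_string_actions["--from-date"]
parser._handle_conflict_resolve(None, [("--from-date", action)])

print(parser.parse_args(["--url", "https://reps.mozilla.org"]))
# --from-date is no longer recognised by the parser after the removal.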
Example #14
def parse_args():
    parser = argparse.ArgumentParser()

    # Options
    group = parser.add_argument_group('General options')
    group.add_argument('-u', '--user', dest='user', default='root',
                       help='Database user')
    group.add_argument('-p', '--password', dest='password', default='',
                       help='Database password')
    group.add_argument('-d', '--database', dest='database',
                       help='Database name')
    group.add_argument('--host', dest='host', default='localhost',
                       help='Database host')
    group.add_argument('--port', dest='port', default='3306',
                       help='Database host port')
    group.add_argument('--clear', action='store_true',
                       help='Delete database contents')

    parser.add_argument('logdir', help='Directory where Apache logs are stored')

    return parser.parse_args()
Example #15
def create_cli_parser():
    parser = argparse.ArgumentParser(
        description='Generates an email template for an Asana project',
        fromfile_prefix_chars='@')
    parser.add_argument('project_id', help='the asana project id')
    parser.add_argument('api_key', help='your asana api key')
    parser.add_argument(
        '-c', '--completed', type=int, dest='completed_lookback_hours',
        metavar='HOURS',
        help='show non-archived tasks completed within the past hours '
        'specified')
    parser.add_argument(
        '-f', '--filter-tags', nargs='+', dest='tag_filters', default=[],
        metavar='TAG', help='tags to filter tasks on')
    parser.add_argument(
        '-s', '--filter-sections', nargs='+', dest='section_filters',
        default=[], metavar='SECTION', help='sections to filter tasks on')
    parser.add_argument(
        '--html-template', default='Default.html',
        help='a custom template to use for the html portion')
    parser.add_argument(
        '--text-template', default='Default.markdown',
        help='a custom template to use for the plaintext portion')
    email_group = parser.add_argument_group(
        'email', 'arguments for sending emails')
    email_group.add_argument(
        '--mail-server', metavar='HOSTNAME', default='localhost',
        help='the hostname of the mail server to send email from '
        '(default: localhost)')
    email_group.add_argument(
        '--to-addresses', nargs='+', metavar='ADDRESS',
        help="the 'To:' addresses for the outgoing email")
    email_group.add_argument(
        '--cc-addresses', nargs='+', metavar='ADDRESS',
        help="the 'Cc:' addresses for the outgoing email")
    email_group.add_argument(
        '--from-address', metavar='ADDRESS',
        help="the 'From:' address for the outgoing email")

    return parser
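
The fromfile_prefix_chars='@' setting above lets users keep long argument lists in a file and pass them as @filename, one argument per line (argparse's default file format). A small sketch of how that is typically used (the file contents are made up):

import argparse
import tempfile

parser = argparse.ArgumentParser(fromfile_prefix_chars="@")
parser.add_argument("project_id")
parser.add_argument("api_key")
parser.add_argument("--mail-server", default="localhost")

# Write one argument per line, as argparse expects by default.
with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as handle:
    handle.write("12345\nSECRET\n--mail-server\nsmtp.example.org\n")
    args_file = handle.name

args = parser.parse_args(["@" + args_file])
print(args.project_id, args.mail_server)  # 12345 smtp.example.org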

Example #16
def add_signatures(jclient, url, issue_key, should_update=False):
    signatures = [
        ComponentsVersionSignature, HostsStatusSignature,
        HostsExtraDetailSignature, StorageDetailSignature,
        LibvirtRebootFlagSignature
    ]
    for sig in signatures:
        s = sig(jclient)
        s.update_ticket(url, issue_key, should_update=should_update)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    loginGroup = parser.add_argument_group(title="login options")
    loginArgs = loginGroup.add_mutually_exclusive_group()
    loginArgs.add_argument("--netrc",
                           default="~/.netrc",
                           required=False,
                           help="netrc file")
    loginArgs.add_argument(
        "-up",
        "--user-password",
        required=False,
        help="Username and password in the format of user:pass")
    selectorsGroup = parser.add_argument_group(title="Issues selection")
    selectors = selectorsGroup.add_mutually_exclusive_group(required=True)
    selectors.add_argument("-a",
                           "--all_issues",
                           action='store_true',
Example #17
class JobError(Exception):
    def __init__(self, http, when):
        self.status = http.status_code
        self.explain = http.text
        self.when = when
    def __str__(self):
        return "Error when "+self.when+" : "+str(self.status)+" "+self.explain



# Main

if __name__=="__main__":
    parser = argparse.ArgumentParser(description='Makes requests to web services, either the Besancon Model of the Galaxy or Gravpot web service', add_help=False)

    serverGrp = parser.add_argument_group('Server options', 'Options for server connection.')
    serverGrp.add_argument('--url', help='URL of the server.', default='https://model.obs-besancon.fr/ws')
    serverGrp.add_argument('--user', help='Username used to connect to server, by default it is the system username ('+os.getlogin()+').', default=os.getlogin())
    serverGrp.add_argument('--pass', dest='passwd', help='Password used to connect to server, by default the password is prompted. Usage of this option is discouraged because it will be visible, for example, with "ps" command and written to history.')

    jobGrp = parser.add_argument_group('Job selection', 'Select the job you want to manipulate. To create a new one type "--create". If this option is not present, it returns your list of jobs and exit ignoring options in "Actions".').add_mutually_exclusive_group()
    jobGrp.add_argument('-c', '--create', help='Create a new job.', action='store_true')
    jobGrp.add_argument('-j', '--job', type=int, help='The job affected by this request.')

    actionGrp = parser.add_argument_group('Actions', 'Configure and make actions on the selected job.')
    actionGrp.add_argument('-p', '--param', help='Set parameter named PARAMNAME with VALUE.', action='append', nargs=2, metavar=('PARAMNAME', 'VALUE'))
    actionGrp.add_argument('--execdur', help='Set maximum execution duration of a job in seconds (this setting can be overridden by server\'s configuration).', type=int)
    actionGrp.add_argument('--tdest', help='Set destruction time of a job (date+time in ISO8601 format, this setting can be overridden by server\'s configuration).')
    actionGrp.add_argument('--run', help='Send job for computation.', action='store_true')
    actionGrp.add_argument('--abort', help='Abort the job.', action='store_true')
    actionGrp.add_argument('--delete', help='Delete the job.', action='store_true')
Example #18
import argparse
import datetime
import os
import yaml
from unicodedata import normalize
import flickr
import dateutil.parser

def mkdate(d):
    return dateutil.parser.parse(d)

parser = argparse.ArgumentParser()
parser.add_argument_group()
parser.add_argument('-n','--new')
parser.add_argument('-t', '--title')
parser.add_argument('-d', '--date', type=mkdate)
parser.add_argument('-p', '--photo')


CONTENT_PATH = os.path.join(os.path.dirname(__file__), '../content/')



def slugify(text, encoding=None,
         permitted_chars='abcdefghijklmnopqrstuvwxyz0123456789-'):
    if isinstance(text, str):
        text = text.decode(encoding or 'ascii')
    clean_text = text.strip().replace(' ', '-').lower()
    while '--' in clean_text:
        clean_text = clean_text.replace('--', '-')
    ascii_text = normalize('NFKD', clean_text).encode('ascii', 'ignore')
Example #19
	cmd = "heroku pgbackups:capture --app ojo-streamer --expire"
	execute(cmd)
	
def restart_ojo_streamer():
	cmd = "cd ~/Development/ojo/ojo-streamer"
	execute(cmd)
	cmd = "heroku restart --app ojo-streamer"
	execute(cmd)
	
# =======================================================================
# Main
#
if __name__ == '__main__':

	parser 		= argparse.ArgumentParser(description='Generate Forecast Landslide Estimates')
	apg_input 	= parser.add_argument_group('Input')
	
	apg_input.add_argument("-f", "--force", 	action='store_true', help="Forces new products to be generated")
	apg_input.add_argument("-v", "--verbose", 	action='store_true', help="Verbose Flag")

	options 	= parser.parse_args()
	force		= options.force
	verbose		= options.verbose

	removed_old_files_from_local_storage()
	
	#removed__old_files_from_s3()
	#generate_landslide_nowcast()
	
	#backup_ojo_wiki_db()
	#backup_ojo_streamer_db()
Example #20
def parse_args(argv):
    """Parse the command line.
    """
    parser = argparse.ArgumentParser(
        description='An interface to tarsnap to manage backups.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-q', action='store_true', dest='quiet', help='be quiet')
    group.add_argument('-v', action='store_true', dest='verbose', help='be verbose')
    # We really want nargs=(1,2), but since this isn't available, we can
    # just as well support an arbitrary number of values for each -o.
    parser.add_argument('-o', metavar=('name', 'value'), nargs='+',
                        dest='tarsnap_options', default=[], action='append',
                        help='option to pass to tarsnap',)
    parser.add_argument('--config', '-c', help='use the given config file')

    group = parser.add_argument_group(
        description='Instead of using a configuration file, you may define '\
                    'a single job on the command line:')
    group.add_argument('--target', help='target filename for the backup')
    group.add_argument('--sources', nargs='+', help='paths to backup',
                        default=[])
    group.add_argument('--deltas', '-d', metavar='DELTA',
                        type=timedelta_string,
                        help='generation deltas', nargs='+')
    group.add_argument('--dateformat', '-f', help='dateformat')

    for plugin in PLUGINS:
        plugin.setup_arg_parser(parser)

    # This will allow the user to break out of an nargs='*' to start
    # with the subcommand. See http://bugs.python.org/issue9571.
    parser.add_argument('-', dest='__dummy', action="store_true",
                        help=argparse.SUPPRESS)

    subparsers = parser.add_subparsers(
        title="commands", description="commands may offer additional options")
    for cmd_name, cmd_klass in COMMANDS.iteritems():
        subparser = subparsers.add_parser(cmd_name, help=cmd_klass.help,
                                          description=cmd_klass.description,
                                          add_help=False)
        subparser.set_defaults(command=cmd_klass)
        group = subparser.add_argument_group(
            title="optional arguments for this command")
        # We manually add the --help option so that we can have a
        # custom group title, but only show a single group.
        group.add_argument('-h', '--help', action='help',
                           default=argparse.SUPPRESS,
                           help='show this help message and exit')
        cmd_klass.setup_arg_parser(group)

        # Unfortunately, we need to redefine the jobs argument for each
        # command, rather than simply having it once, globally.
        subparser.add_argument(
            'jobs', metavar='job', nargs='*',
            help='only process the given job as defined in the config file')

    # This would be in a group automatically, but it would be shown as
    # the very first thing, while it really should be the last (which
    # explicitly defining the group causes to happen).
    #
    # Also, note that we define this argument for each command as well,
    # and the command specific one will actually be parsed. This is
    # because while argparse allows us to *define* this argument globally,
    # and renders the usage syntax correctly as well, it isn't actually
    # able to parse it correctly (see
    # http://bugs.python.org/issue9540).
    group = parser.add_argument_group(title='positional arguments')
    group.add_argument(
        '__not_used', metavar='job', nargs='*',
        help='only process the given job as defined in the config file')

    args = parser.parse_args(argv)

    # Do some argument validation that would be too much to ask for
    # argparse to handle internally.
    if args.config and (args.target or args.dateformat or args.deltas or
                        args.sources):
        raise ArgumentError('If --config is used, then --target, --deltas, '
                            '--sources and --dateformat are not available')
    if args.jobs and not args.config:
        raise ArgumentError(('Specific jobs (%s) can only be given if a '
                            'config file is used') % ", ".join(args.jobs))
    # The command may want to do some validation regarding its own options.
    args.command.validate_args(args)

    return args
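
The subcommand loop above disables automatic help on each subparser (add_help=False) and re-adds -h/--help inside an argument group with a custom title, so every command's options appear under a single "optional arguments for this command" heading, with the jobs positional redefined per command. A reduced sketch of that arrangement with a single illustrative command:

import argparse

parser = argparse.ArgumentParser(description="Demo of per-command option groups")
subparsers = parser.add_subparsers(title="commands", dest="command")

backup = subparsers.add_parser("make", help="create backups", add_help=False)
group = backup.add_argument_group(title="optional arguments for this command")
group.add_argument("-h", "--help", action="help", default=argparse.SUPPRESS,
                   help="show this help message and exit")
group.add_argument("--target", help="target filename for the backup")
backup.add_argument("jobs", metavar="job", nargs="*",
                    help="only process the given jobs")

args = parser.parse_args(["make", "--target", "backup.tar", "nightly"])
print(args.command, args.target, args.jobs)  # make backup.tar ['nightly']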
Example #21
    # Form ingestion id if not set yet
    if ingestion_id is None:
        ingestion_id = '-'.join([c_options.es_domain, str(es_ts_start), str(es_ts_end)])

    return es_ts_start, es_ts_end, ingestion_id


## MAIN
if __name__ == '__main__':
    start_time = time.time()

    # Setup parser for arguments options
    parser = ArgumentParser()

    # Define groups
    job_group = parser.add_argument_group("job", "Job related parameters")
    hbase_group = parser.add_argument_group("hbase", "HBase related parameters")
    es_group = parser.add_argument_group("es", "ElasticSearch related parameters")

    # Define HBase related arguments
    hbase_group.add_argument("--hbase_host", dest="hbase_host", required=True)
    hbase_group.add_argument("--hbase_port", dest="hbase_port", default=2181)
    hbase_group.add_argument("--hbase_ip", dest="hbase_ip", default="10.1.94.57")
    # BEWARE: these tables should be already created
    # we could just have a table_prefix
    hbase_group.add_argument("--table_sha1", dest="tab_sha1_infos_name", required=True)
    hbase_group.add_argument("--table_update", dest="tab_update_name", required=True)

    # Define ES related options
    es_group.add_argument("--es_host", dest="es_host", required=True)
    es_group.add_argument("--es_domain", dest="es_domain", required=True)
Example #22
File: script.py  Project: zr40/tarsnapper
def parse_args(argv):
    """Parse the command line.
    """
    parser = argparse.ArgumentParser(
        description='An interface to tarsnap to manage backups.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-q',
                       action='store_true',
                       dest='quiet',
                       help='be quiet')
    group.add_argument('-v',
                       action='store_true',
                       dest='verbose',
                       help='be verbose')
    # We really want nargs=(1,2), but since this isn't available, we can
    # just as well support an arbitrary number of values for each -o.
    parser.add_argument(
        '-o',
        metavar=('name', 'value'),
        nargs='+',
        dest='tarsnap_options',
        default=[],
        action='append',
        help='option to pass to tarsnap',
    )
    parser.add_argument('--config', '-c', help='use the given config file')

    group = parser.add_argument_group(
        description='Instead of using a configuration file, you may define '
        'a single job on the command line:')
    group.add_argument('--target', help='target filename for the backup')
    group.add_argument('--sources',
                       nargs='+',
                       help='paths to backup',
                       default=[])
    group.add_argument('--deltas',
                       '-d',
                       metavar='DELTA',
                       type=timedelta_string,
                       help='generation deltas',
                       nargs='+')
    group.add_argument('--dateformat', '-f', help='dateformat')

    for plugin in PLUGINS:
        plugin.setup_arg_parser(parser)

    # This will allow the user to break out of an nargs='*' to start
    # with the subcommand. See http://bugs.python.org/issue9571.
    parser.add_argument('-',
                        dest='__dummy',
                        action="store_true",
                        help=argparse.SUPPRESS)

    subparsers = parser.add_subparsers(
        title="commands", description="commands may offer additional options")
    for cmd_name, cmd_klass in COMMANDS.items():
        subparser = subparsers.add_parser(cmd_name,
                                          help=cmd_klass.help,
                                          description=cmd_klass.description,
                                          add_help=False)
        subparser.set_defaults(command=cmd_klass)
        group = subparser.add_argument_group(
            title="optional arguments for this command")
        # We manually add the --help option so that we can have a
        # custom group title, but only show a single group.
        group.add_argument('-h',
                           '--help',
                           action='help',
                           default=argparse.SUPPRESS,
                           help='show this help message and exit')
        cmd_klass.setup_arg_parser(group)

        # Unfortunately, we need to redefine the jobs argument for each
        # command, rather than simply having it once, globally.
        subparser.add_argument(
            'jobs',
            metavar='job',
            nargs='*',
            help='only process the given job as defined in the config file')

    # This would be in a group automatically, but it would be shown as
    # the very first thing, while it really should be the last (which
    # explicitly defining the group causes to happen).
    #
    # Also, note that we define this argument for each command as well,
    # and the command specific one will actually be parsed. This is
    # because while argparse allows us to *define* this argument globally,
    # and renders the usage syntax correctly as well, it isn't actually
    # able to parse it correctly (see
    # http://bugs.python.org/issue9540).
    group = parser.add_argument_group(title='positional arguments')
    group.add_argument(
        '__not_used',
        metavar='job',
        nargs='*',
        help='only process the given job as defined in the config file')

    args = parser.parse_args(argv)

    # Do some argument validation that would be too much to ask for
    # argparse to handle internally.
    if args.config and (args.target or args.dateformat or args.deltas
                        or args.sources):
        raise ArgumentError('If --config is used, then --target, --deltas, '
                            '--sources and --dateformat are not available')
    if args.jobs and not args.config:
        raise ArgumentError(('Specific jobs (%s) can only be given if a '
                             'config file is used') % ", ".join(args.jobs))
    # The command may want to do some validation regarding its own options.
    args.command.validate_args(args)

    return args
Example #23
            ws.send('["token:'+access_token+'"]')
        except Exception as exc:
            logging.error('error fetching access token: ' + str(exc))
            expires = None

if __name__ == '__main__':
    from auditing.db.Models import DataCollector, DataCollectorType, Organization, commit, rollback

    print ("\n*****************************************************")
    print ("LoRaWAN Security Framework - %s"%(sys.argv[0]))
    print ("Copyright (c) 2019 IOActive Inc.  All rights reserved.")
    print ("*****************************************************\n")

    parser = argparse.ArgumentParser(description='This script connects to TTN with a TTN account and saves messages into the DB.')

    requiredGroup = parser.add_argument_group('Required arguments')
    
    requiredGroup.add_argument('--user',
                        help='TTN username')
    requiredGroup.add_argument('--pwd',
                        help='TTN password')
    requiredGroup.add_argument('--gw-id',
                        help='The id of the gateway you want to connect. This id has the format eui-0011AABBCCDDEEFF. Make sure to provide this id using the whole string.')
    parser.add_argument('--collector-id',
                        help='The ID of the dataCollector. This ID will be associated with the packets saved into the DB. eg. --collector-id 1')
    parser.add_argument('--organization-id',
                        help='The ID of the organization. This ID will be associated with the packets saved into the DB. eg. --organization-id 1',
                        default=None)

    options = parser.parse_args()
Example #24
def mkargparse():
    parser = argparse.ArgumentParser(
        description='str2str: string to struct converter')

    gconf = parser.add_argument_group(
        'Loading configuration',
        description=
        '# if nothing specified, config files (*.json) loaded from default --redir'
    )
    ginput = parser.add_argument_group('Input data')
    gfilter = parser.add_argument_group('Filtering')
    gpost = parser.add_argument_group('Post-processing')
    goutput = parser.add_argument_group('Output data')

    # general options
    parser.add_argument('-v',
                        dest='v',
                        default=0,
                        help='verbose',
                        action='count')

    # group conf
    gconf.add_argument('--re',
                       metavar='filename.json',
                       dest='re',
                       help='import regexes from filename ',
                       default=None,
                       action='append')
    gconf.add_argument(
        '--redir',
        metavar='DIR',
        dest='redir',
        help='import all json regex files from this dir (default: {})'.format(
            default_redir),
        default=default_redir)
    gconf.add_argument(
        '--codename',
        metavar='CODENAME',
        dest='codename',
        help='process only this codename(s). For debug or speed-up.',
        default=None,
        action='append')

    # group input
    ginput.add_argument('-f',
                        dest='filename',
                        default=None,
                        help='text file name (default: stdin)',
                        action='append')
    ginput.add_argument('--grep',
                        dest='grep',
                        default=None,
                        help='load only strings which contain this text')
    ginput.add_argument(
        '--pload',
        dest='pload',
        metavar="FILENAME.p",
        default=False,
        help='load pre-parsed data as pickle serialized object')
    ginput.add_argument(
        '--jload',
        dest='jload',
        default=False,
        action='store_true',
        help=
        'Do not parse by regexes, load data from pre-parsed json (saved with --jdump before)'
    )

    # group filter
    gfilter.add_argument('--filter',
                         dest='filter',
                         default=None,
                         help='evaluate filtering expression')

    # group postprocessing
    gpost.add_argument('--sort',
                       dest='sort',
                       metavar="FIELD",
                       default=None,
                       help='sort by value of field')
    gpost.add_argument('--head',
                       dest='head',
                       metavar="NUM",
                       default=None,
                       help='leave only first NUM records',
                       type=int)
    gpost.add_argument('--tail',
                       dest='tail',
                       metavar="NUM",
                       default=None,
                       help='leave only last NUM records',
                       type=int)
    gpost.add_argument('--reverse',
                       dest='reverse',
                       default=False,
                       action='store_true',
                       help='Reverse resulting list')
    gpost.add_argument('--rmkey',
                       dest='rmkey',
                       metavar="KEY",
                       default=[],
                       help='delete key (if exists)',
                       action='append')
    gpost.add_argument('--onlykey',
                       dest='onlykey',
                       metavar="KEY",
                       default=[],
                       help='delete all keys except these (multiple)',
                       action='append')
    gpost.add_argument('--group',
                       dest='group',
                       metavar="FIELD",
                       default=None,
                       help='group by same key-field')
    gpost.add_argument('--gname',
                       dest='gname',
                       metavar="FIELD",
                       default=[],
                       action='append',
                       help='overlap: name')
    gpost.add_argument('--glist',
                       dest='glist',
                       metavar="FIELD",
                       default=[],
                       action='append',
                       help='overlap: list')
    gpost.add_argument('--gmin',
                       dest='gmin',
                       metavar="FIELD",
                       default=[],
                       action='append',
                       help='overlap: min')
    gpost.add_argument('--gmax',
                       dest='gmax',
                       metavar="FIELD",
                       default=[],
                       action='append',
                       help='overlap: max')
    gpost.add_argument('--gfirst',
                       dest='gfirst',
                       metavar="FIELD",
                       default=[],
                       action='append',
                       help='overlap: first')
    gpost.add_argument('--glast',
                       dest='glast',
                       metavar="FIELD",
                       default=[],
                       action='append',
                       help='overlap: last')

    # group output
    goutput.add_argument('--dump',
                         dest='dump',
                         default=False,
                         action='store_true',
                         help='output data with python print (not really useful)')
    goutput.add_argument('--jdump',
                         dest='jdump',
                         default=False,
                         action='store_true',
                         help='out data in json format (list of dicts)')
    goutput.add_argument('--pdump',
                         dest='pdump',
                         metavar="FILENAME.p",
                         default=False,
                         help='save parsed data as pickle serialized object')
    goutput.add_argument('--fmt',
                         dest='fmt',
                         default=None,
                         help='print in format')
    goutput.add_argument('--key',
                         dest='key',
                         default=None,
                         action='append',
                         help='print keys (multiple)')
    goutput.add_argument('--keysep',
                         dest='keysep',
                         default=' ',
                         help='separator for keys')
    goutput.add_argument('--keynames',
                         dest='keynames',
                         default=False,
                         action='store_true',
                         help='print also keynames (for --key)')
    goutput.add_argument('--count',
                         dest='count',
                         default=False,
                         action='store_true',
                         help='print count of records')
    goutput.add_argument('--sum',
                         dest='sum',
                         metavar='FIELD',
                         default=False,
                         help='calculate and print sum of field')
    goutput.add_argument('--avg',
                         dest='avg',
                         metavar='FIELD',
                         default=False,
                         help='calculate and print average of field')

    return parser
Example #25
        es_ts_end = max_ts
    # form ingestion id
    ingestion_id = '-'.join([c_options.es_domain, str(es_ts_start), str(es_ts_end)])

    return es_ts_start, es_ts_end, ingestion_id


## MAIN
if __name__ == '__main__':
    start_time = time.time()

    # Setup parser for arguments options
    parser = ArgumentParser()

    # Define groups
    job_group = parser.add_argument_group("job", "Job related parameters")
    hbase_group = parser.add_argument_group("hbase", "HBase related parameters")
    es_group = parser.add_argument_group("es", "ElasticSearch related parameters")
    # we could define parameters for the features extraction...
    # feature column name
    # if we normalize the feature
    # or even features type if we define an extractor that implements get_features_from_URL
    feat_group = parser.add_argument_group("feat", "Features related parameters")

    # Define HBase related arguments
    hbase_group.add_argument("--hbase_host", dest="hbase_host", required=True)
    hbase_group.add_argument("--hbase_port", dest="hbase_port", default=2181)
    hbase_group.add_argument("--hbase_ip", dest="hbase_ip", default="10.1.94.57")
    # BEWARE: these tables should be already created
    # we could just have a table_prefix
    hbase_group.add_argument("--table_sha1", dest="tab_sha1_infos_name", required=True)
Example #26
def make_argparser():
    parser = argparse.ArgumentParser(description='Parse a .kml or .kmz track.')
    parser.add_argument(
        'inputs',
        metavar='kml/kmz',
        nargs='+',
        help=
        'The inputs. Can be kml or kmz files, or directories containing them.')
    output = parser.add_argument_group('Output')
    output.add_argument(
        '-k',
        '--key',
        help=
        'Print the value of this key from the metadata. Special keys: "markers" - print the name '
        'of each marker.')
    output.add_argument(
        '-l',
        '--key-len',
        help=
        'Print the string length of this value for this key from the metadata.'
    )
    output.add_argument(
        '-K',
        '--marker-key',
        help='Print the value of this key from marker metadata.')
    output.add_argument('--marker-meta',
                        action='store_true',
                        help='Print all marker metadata.')
    output.add_argument('--marker-keys',
                        action='store_true',
                        help='Print all keys used in marker metadata.')
    output.add_argument(
        '-D',
        '--dump',
        action='store_true',
        help=
        'Extract the xml content, format it, and print to stdout. Warning: The formatted xml may '
        'not be valid or equivalent to the input. Mainly useful for human readers.'
    )
    output.add_argument('-o',
                        '--outfile',
                        default=sys.stdout,
                        type=argparse.FileType('w'),
                        help='Write output to this file instead of stdout.')
    output.add_argument(
        '-r',
        '--ref-points',
        type=load_reference_points,
        help=
        'A YAML file containing GPS coordinates of reference points to use in summaries '
        'describing where the track is.')
    filters = parser.add_argument_group('Filtering')
    filters.add_argument(
        '-F',
        '--filename',
        help=
        'Only match the track matching this filename. Useful when reading a tarball.'
    )
    filters.add_argument(
        '-L',
        '--location',
        nargs=2,
        type=float,
        help=
        'Only match tracks that went near this location, given by a latitude/longitude '
        'pair.')
    filters.add_argument(
        '-d',
        '--distance',
        type=float,
        default=2,
        help=
        'When using --location, only match tracks that went within this many miles of the '
        'location. Default: %(default)s mi')
    filters.add_argument(
        '-s',
        '--start',
        type=int,
        help='Only match tracks that start after this timestamp.')
    filters.add_argument(
        '-e',
        '--end',
        type=int,
        help='Only match tracks that end before this timestamp.')
    filters.add_argument(
        '-M',
        '--marker-filt-meta',
        nargs=2,
        metavar=('KEY', 'VALUE'),
        help=
        'Only match tracks with markers whose metadata matches this key/value pair. '
        'If the metadata value is a list, only one element needs to match your query (case-'
        'insensitive).')
    filters.add_argument(
        '--marker-filt-key',
        help='Only match tracks with markers that use this key.')
    log = parser.add_argument_group('Logging')
    log.add_argument(
        '-g',
        '--log',
        type=argparse.FileType('w'),
        default=sys.stderr,
        help=
        'Print log messages to this file instead of to stderr. Warning: Will overwrite the file.'
    )
    volume = log.add_mutually_exclusive_group()
    volume.add_argument('-q',
                        '--quiet',
                        dest='volume',
                        action='store_const',
                        const=logging.CRITICAL,
                        default=logging.WARNING)
    volume.add_argument('-v',
                        '--verbose',
                        dest='volume',
                        action='store_const',
                        const=logging.INFO)
    volume.add_argument('--debug',
                        dest='volume',
                        action='store_const',
                        const=logging.DEBUG)
    return parser
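
The --location/--distance options above imply a great-circle distance test against every point of a track. A hedged sketch of such a check using the haversine formula (this helper is illustrative and not part of the original module):

import math

def within_miles(track_points, location, max_miles):
    """Return True if any (lat, lon) point lies within max_miles of location."""
    lat0, lon0 = map(math.radians, location)
    earth_radius_miles = 3958.8
    for lat, lon in track_points:
        lat, lon = math.radians(lat), math.radians(lon)
        dlat, dlon = lat - lat0, lon - lon0
        a = (math.sin(dlat / 2) ** 2
             + math.cos(lat0) * math.cos(lat) * math.sin(dlon / 2) ** 2)
        if 2 * earth_radius_miles * math.asin(math.sqrt(a)) <= max_miles:
            return True
    return False
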
示例#27
0
    dirgroup = parser.add_mutually_exclusive_group(required=True)
    dirgroup.add_argument(
        'datadir',
        nargs='?',
        default=None,
        help='''Data directory, to be filled with channel subdirectories.''',
    )
    dirgroup.add_argument(
        '-o',
        '--out',
        dest='outdir',
        default=None,
        help='''Data directory, to be filled with channel subdirectories.''',
    )

    mbgroup = parser.add_argument_group(title='mainboard')
    mbgroup.add_argument(
        '-m',
        '--mainboard',
        dest='mboards',
        action='append',
        help='''Mainboard address. (default: first device found)''',
    )
    mbgroup.add_argument(
        '-d',
        '--subdevice',
        dest='subdevs',
        action='append',
        help='''USRP subdevice string. (default: "A:A")''',
    )
def parse_args():
    parser = argparse.ArgumentParser(
        description='Check DDoS events on prefixes protected by Imperva',
        epilog=
        'required env vars: IMPERVA_API_ID, IMPERVA_API_KEY, IMPERVA_ACC_ID')

    parser.add_argument('prefix',
                        action='append',
                        nargs='+',
                        help='ip prefix(es), separated by space')
    parser.add_argument(
        '-w',
        '--watch',
        action='store_true',
        help='keep running and collect events continuously')
    parser.add_argument('-i',
                        '--interval',
                        type=int,
                        metavar='N',
                        default=300,
                        help='check last N seconds, default: 300')
    parser.add_argument(
        '-o',
        '--overlap',
        type=int,
        metavar='N',
        default=300,
        help='compensate for latency, default: 300 sec (watch mode only)')
    parser.add_argument(
        '-t',
        '--threshold',
        type=int,
        metavar='N',
        default=100,
        help='report after N fails, default: 100 (watch mode only)')
    parser.add_argument('-v',
                        '--debug',
                        action='store_true',
                        help='enable debug output')

    prom = parser.add_argument_group(
        'Prometheus metrics (watch mode only, needs prometheus_client module)')
    prom.add_argument('--prom-port',
                      type=int,
                      metavar='PORT',
                      help='export Prometheus metrics on this port')
    prom.add_argument(
        '--prom-init-hours',
        type=int,
        metavar='N',
        default=24,
        help='preload N hours of historical context, default: 24')

    slack = parser.add_argument_group('Slack notifications (watch mode only)')
    slack.add_argument(
        '--slack-room',
        metavar='room-name',
        help=
        'send event notifications to this Slack channel, SLACK_HOOK_URL env must be set'
    )
    slack.add_argument('--slack-team',
                       metavar='<!subteam^ID|@team>',
                       help='mention this team in the Slack notification')
    return parser.parse_args()
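
The Prometheus group above only matters in watch mode and requires the prometheus_client module. A minimal sketch of the exporter side implied by --prom-port (the metric name is an assumption, not the script's actual metric):

from prometheus_client import Counter, start_http_server

# Assumed metric; the real script may use different names and labels.
ddos_events_total = Counter('imperva_ddos_events_total',
                            'DDoS events seen per protected prefix',
                            ['prefix'])

def start_metrics(port):
    start_http_server(port)  # serve /metrics on the given port

def record_event(prefix):
    ddos_events_total.labels(prefix=prefix).inc()
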
def main():
    program_description = u"""
    Command line interface for Amazon Glacier
    """

    # Config parser
    conf_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=False)

    conf_parser.add_argument("-c",
                             "--conf",
                             default=".glacier",
                             help="Specify config file",
                             metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args()

    # Here we parse config from files in the home folder or in the current folder
    # We use separate sections for aws- and glacier-specific configs
    aws = glacier = {}
    config = ConfigParser.SafeConfigParser()
    if config.read([args.conf, os.path.expanduser('~/.glacier')]):
        try:
            aws = dict(config.items("aws"))
        except ConfigParser.NoSectionError:
            pass
        try:
            glacier = dict(config.items("glacier"))
        except ConfigParser.NoSectionError:
            pass

    # Join config options with environment variables
    aws = dict(os.environ.items() + aws.items())
    glacier = dict(os.environ.items() + glacier.items())

    # Helper functions
    filt_s = lambda x: x.lower().replace("_", "-")
    filt = lambda x, y="": dict(
        ((y + "-" if y not in filt_s(k) else "") + filt_s(k), v)
        for (k, v) in x.iteritems())
    a_required = lambda x: x not in filt(aws, "aws")
    required = lambda x: x not in filt(glacier)
    a_default = lambda x: filt(aws, "aws").get(x)
    default = lambda x: filt(glacier).get(x)

    # Main parser
    parser = argparse.ArgumentParser(
        parents=[conf_parser],
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=program_description)
    subparsers = parser.add_subparsers(
        title='Subcommands',
        help=u"For subcommand help, use: glacier <subcommand> -h")

    group = parser.add_argument_group('aws')
    help_msg_config = u"(Required if you haven't created .glacier config file)"
    group.add_argument('--aws-access-key',
                       required=a_required("aws-access-key"),
                       default=a_default("aws-access-key"),
                       help="Your aws access key " + help_msg_config)
    group.add_argument('--aws-secret-key',
                       required=a_required("aws-secret-key"),
                       default=a_default("aws-secret-key"),
                       help="Your aws secret key " + help_msg_config)
    group = parser.add_argument_group('glacier')
    group.add_argument('--region',
                       required=required("region"),
                       default=default("region"),
                       help="Region where glacier should take action " +
                       help_msg_config)
    group.add_argument('--bookkeeping',
                       required=False,
                       default=default("bookkeeping") and True,
                       action="store_true",
                       help="Should we keep a record of all created archives.\
                              This requires a SimpleDB account and its \
                              bookkeeping domain name set")
    group.add_argument('--bookkeeping-domain-name',
                       required=False,
                       default=default("bookkeeping-domain-name"),
                       help="SimpleDB domain name for bookkeeping.")

    parser_lsvault = subparsers.add_parser("lsvault", help="List vaults")
    parser_lsvault.set_defaults(func=lsvault)

    parser_mkvault = subparsers.add_parser("mkvault",
                                           help="Create a new vault")
    parser_mkvault.add_argument('vault')
    parser_mkvault.set_defaults(func=mkvault)

    parser_rmvault = subparsers.add_parser('rmvault', help='Remove vault')
    parser_rmvault.add_argument('vault')
    parser_rmvault.set_defaults(func=rmvault)

    parser_listjobs = subparsers.add_parser('listjobs', help='List jobs')
    parser_listjobs.add_argument('vault')
    parser_listjobs.set_defaults(func=listjobs)

    parser_describejob = subparsers.add_parser('describejob',
                                               help='Describe job')
    parser_describejob.add_argument('vault')
    parser_describejob.add_argument('jobid')
    parser_describejob.set_defaults(func=describejob)

    parser_upload = subparsers.add_parser(
        'upload',
        help='Upload an archive',
        formatter_class=argparse.RawTextHelpFormatter)
    parser_upload.add_argument('vault')
    parser_upload.add_argument('filename')
    parser_upload.add_argument('--stdin',
                               help="Input data from stdin, instead of file",
                               action='store_true')
    parser_upload.add_argument('--name',
                               default=None,
                               help='''\
Use the given name as the filename for bookkeeping
purposes. This option is useful in conjunction with
--stdin or when the file being uploaded is a
temporary file.''')
    parser_upload.add_argument('--partsize',
                               type=int,
                               default=-1,
                               help='''\
Part size to use for upload (in Mb). Must
be a power of 2 in the range:
    1 .. 4,294,967,296 (2^0 .. 2^32).
Values that are not a power of 2 will be
adjusted upwards to the next power of 2.

Amazon accepts up to 10,000 parts per upload.

Smaller parts result in more frequent progress
updates, and less bandwidth wasted if a part
needs to be re-transmitted. On the other hand,
smaller parts limit the size of the archive that
can be uploaded. Some examples:

partsize  MaxArchiveSize
    1        1*1024*1024*10000 ~= 10Gb
    4        4*1024*1024*10000 ~= 41Gb
   16       16*1024*1024*10000 ~= 137Gb
  128      128*1024*1024*10000 ~= 1.3Tb

By default, the smallest possible value is used
when the archive size is known ahead of time.
Otherwise (when reading from STDIN) a value of
128 is used.''')
    parser_upload.add_argument('description', nargs='*')
    parser_upload.set_defaults(func=putarchive)

    parser_getarchive = subparsers.add_parser(
        'getarchive', help='Get a file by explicitly setting archive id')
    parser_getarchive.add_argument('vault')
    parser_getarchive.add_argument('archive')
    parser_getarchive.add_argument('filename', nargs='?')
    parser_getarchive.set_defaults(func=getarchive)

    parser_rmarchive = subparsers.add_parser('rmarchive',
                                             help='Remove archive')
    parser_rmarchive.add_argument('vault')
    parser_rmarchive.add_argument('archive')
    parser_rmarchive.set_defaults(func=deletearchive)

    parser_search = subparsers.add_parser(
        'search',
        help='Search SimpleDB database (if it was created). \
                      By default returns contents of vault.')
    parser_search.add_argument('--vault')
    parser_search.add_argument('--search_term')
    parser_search.set_defaults(func=search)

    parser_inventory = subparsers.add_parser('inventory',
                                             help='List inventory of a vault')
    parser_inventory.add_argument('--force',
                                  action='store_true',
                                  help="Create a new inventory job")
    parser_inventory.add_argument('vault')
    parser_inventory.set_defaults(func=inventory)

    parser_describevault = subparsers.add_parser('describevault',
                                                 help='Describe vault')
    parser_describevault.add_argument('vault')
    parser_describevault.set_defaults(func=describevault)

    parser_listmultiparts = subparsers.add_parser(
        'listmultiparts', help='List multipart uploads')
    parser_listmultiparts.add_argument('vault')
    parser_listmultiparts.set_defaults(func=listmultiparts)

    parser_abortmultipart = subparsers.add_parser(
        'abortmultipart', help='Abort multipart upload')
    parser_abortmultipart.add_argument('vault')
    parser_abortmultipart.add_argument('uploadId')
    parser_abortmultipart.set_defaults(func=abortmultipart)

    # bookkeeping required
    parser_download = subparsers.add_parser(
        'download',
        help='Download a file by searching through SimpleDB cache for it.')
    parser_download.add_argument(
        '--vault', help="Specify the vault in which archive is located.")
    parser_download.add_argument('--out-file')
    parser_download.add_argument('filename', nargs='?')
    parser_download.set_defaults(func=download)

    args = parser.parse_args(remaining_argv)
    args.func(args)
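
The set_defaults(func=...) calls above are the standard argparse idiom for subcommand dispatch: each subparser stores its handler, and parse_args() hands back a namespace whose func attribute is simply called. A self-contained sketch of the same pattern:

import argparse

def hello(args):
    print("hello, %s" % args.name)

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(title='Subcommands')
hello_parser = subparsers.add_parser('hello', help='Say hello')
hello_parser.add_argument('name')
hello_parser.set_defaults(func=hello)  # each subparser carries its handler

args = parser.parse_args(['hello', 'world'])
args.func(args)                        # dispatch to the selected handler
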
示例#30
0
def main():
	import argparse

	parser = argparse.ArgumentParser(description='schedule backend jobs from different sources.')
	parser.add_argument('-b','--backend', required=True, help='the backend url.')
	parser.add_argument('-a','--account', help='the accountId to use.')
	
	parser.add_argument('-s','--strategy', help='the execution strategy and duration to use.')
	
	# add commands
	cmdGroup = parser.add_argument_group('datasources', 'choose the datasource to take the app(s) from')
	mutalCmds = cmdGroup.add_mutually_exclusive_group(required=True)
	mutalCmds.add_argument('--bundleId', metavar='com.company.app', help='just schedule a given bundleId.')
	mutalCmds.add_argument('--appId', type=int, metavar='trackId', help="the app's App Store id")
	mutalCmds.add_argument('--itunes-top', type=int, default=10, nargs='?', metavar='n', help='use the top N free apps (defaults to 10)')
	mutalCmds.add_argument('--itunes-new', type=int, default=10, nargs='?', metavar='n', help='use the top N new (and free) apps (defaults to 10)')
	mutalCmds.add_argument('--dioscope-new', action='store_true', help='schedule jobs for new versions from apps in Elasticsearch index (max 1000 apps)')
	cmdGroup.add_argument('--itunes-genre', type=int, metavar='id', help='use the given genre only (defaults to all)')
	cmdGroup.add_argument('--itunes-country', type=str, default="de", nargs='?', metavar='countryCode', help='the store country to use (defaults to "de")')
	cmdGroup.add_argument('--dioscope-host', type=str, default="localhost", help='the Elasticsearch host for --dioscope-new')
	cmdGroup.add_argument('--dioscope-index', type=str, default="dioscope", help='the Elasticsearch index for --dioscope-new')


	args = parser.parse_args()
#	logger.debug(args)

	scheduler = Scheduler(args.backend)

	def printRes(res):
		if res:
			logger.info('done!')
		else:
			logger.error('error occurred (could be partially done)')

	if 'bundleId' in args and args.bundleId:
		res = scheduler.schedule_bundleId(args.bundleId, account=args.account, country=args.itunes_country, executionStrategy=args.strategy)
		printRes(res)
		return

	if 'appId' in args and args.appId:
		res = scheduler.schedule_appId(args.appId, account=args.account, country=args.itunes_country, executionStrategy=args.strategy)
		printRes(res)
		return

	if 'dioscope_new' in args and args.dioscope_new:
		res = scheduler.schedule_dioscope_new(args.dioscope_host, args.dioscope_index, account=args.account, country=args.itunes_country, executionStrategy=args.strategy)
		printRes(res)
		return

	genre = ''
	if args.itunes_genre:
		genre = 'genre=%i' % args.itunes_genre

	if 'itunes_top' in args and args.itunes_top:
		url = 'https://itunes.apple.com/%s/rss/topfreeapplications/limit=%i/%s/json' % (args.itunes_country, args.itunes_top, genre)
		res = scheduler.schedule_itunes(url, account=args.account, country=args.itunes_country, executionStrategy=args.strategy)
		printRes(res)
		return

	if 'itunes_new' in args and args.itunes_new:
		url = 'https://itunes.apple.com/%s/rss/newfreeapplications/limit=%i/%s/json' % (args.itunes_country, args.itunes_new, genre)
		res = scheduler.schedule_itunes(url, account=args.account, country=args.itunes_country, executionStrategy=args.strategy)
		printRes(res)
		return
示例#31
0
# TODO Idea: allow there to be different options: whether or not the release
# we're dealing with represents the 'latest' release. This might be useful if,
# for instance, we want to update the details/assets of an older GAP version
# if they change later, or if GAP 4.13.4 is released after GAP 4.14.0, say.
# TODO allow user to specify which branch to make the changes on?
# TODO Let the user choose between pushing directly and making a PR?
# TODO let the user supply a gaproot containing the downloaded and unpacked GAP archive?
# TODO implement some or all of these
#parser.add_argument('-p', '--push', action='store_true',
#                    help='also perform the final push, completing the release')
parser.add_argument('-f',
                    '--force',
                    action='store_true',
                    help='force push to --push-remote/--branch')

group = parser.add_argument_group('GAP release details')

group.add_argument(
    '-t',
    '--tag',
    type=str,
    help='git tag of the GAP release, e.g. v4.15.2 (default: latest release)')
# TODO Perhaps the default should be to use the date in GAP's "configure" file?
# TODO This should be an option at least!
group.add_argument('-d',
                   '--date',
                   type=str,
                   help='YYYY-MM-DD GAP release date (default: today\'s date)')
group.add_argument(
    '--use-github-date',
    action='store_true',
示例#32
0
def main():
    program_description = u"""
    Command line interface for Amazon Glacier
    """

    # Config parser
    conf_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False)

    conf_parser.add_argument("-c",
                             "--conf",
                             default=".glacier",
                             help="Specify config file",
                             metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args()

    # Here we parse config from files in the home folder or in the current folder
    # We use separate sections for aws- and glacier-specific configs
    aws = glacier = {}
    config = ConfigParser.SafeConfigParser()
    if config.read([args.conf, os.path.expanduser('~/.glacier')]):
        try:
            aws = dict(config.items("aws"))
        except ConfigParser.NoSectionError:
            pass
        try:
            glacier = dict(config.items("glacier"))
        except ConfigParser.NoSectionError:
            pass

    # Join config options with environment variables
    aws = dict(os.environ.items() + aws.items())
    glacier = dict(os.environ.items() + glacier.items())

    # Helper functions
    filt_s = lambda x: x.lower().replace("_", "-")
    filt = lambda x, y="": dict(
        ((y + "-" if y not in filt_s(k) else "") + filt_s(k), v)
        for (k, v) in x.iteritems())
    a_required = lambda x: x not in filt(aws, "aws")
    required = lambda x: x not in filt(glacier)
    a_default = lambda x: filt(aws, "aws").get(x)
    default = lambda x: filt(glacier).get(x)

    # Main parser
    parser = argparse.ArgumentParser(parents=[conf_parser],
                                     description=program_description)
    subparsers = parser.add_subparsers(
        title='Subcommands',
        help=u"For subcommand help, use: glacier <subcommand> -h")

    group = parser.add_argument_group('aws')
    help_msg_config = u"(Required if you haven't created .glacier config file)"
    group.add_argument('--aws-access-key',
                       required=a_required("aws-access-key"),
                       default=a_default("aws-access-key"),
                       help="Your aws access key " + help_msg_config)
    group.add_argument('--aws-secret-key',
                       required=a_required("aws-secret-key"),
                       default=a_default("aws-secret-key"),
                       help="Your aws secret key " + help_msg_config)
    group = parser.add_argument_group('glacier')
    group.add_argument('--region',
                       required=required("region"),
                       default=default("region"),
                       help="Region where glacier should take action " +
                       help_msg_config)
    group.add_argument('--bookkeeping',
                       required=False,
                       default=default("bookkeeping") and True,
                       action="store_true",
                       help="Should we keep a record of all created archives.\
                              This requires a SimpleDB account and its \
                              bookkeeping domain name set")
    group.add_argument('--bookkeeping-domain-name',
                       required=False,
                       default=default("bookkeeping-domain-name"),
                       help="SimpleDB domain name for bookkeeping.")

    parser_lsvault = subparsers.add_parser("lsvault", help="List vaults")
    parser_lsvault.set_defaults(func=lsvault)

    parser_mkvault = subparsers.add_parser("mkvault",
                                           help="Create a new vault")
    parser_mkvault.add_argument('vault')
    parser_mkvault.set_defaults(func=mkvault)

    parser_rmvault = subparsers.add_parser('rmvault', help='Remove vault')
    parser_rmvault.add_argument('vault')
    parser_rmvault.set_defaults(func=rmvault)

    parser_listjobs = subparsers.add_parser('listjobs', help='List jobs')
    parser_listjobs.add_argument('vault')
    parser_listjobs.set_defaults(func=listjobs)

    parser_describejob = subparsers.add_parser('describejob',
                                               help='Describe job')
    parser_describejob.add_argument('vault')
    parser_describejob.add_argument('jobid')
    parser_describejob.set_defaults(func=describejob)

    parser_upload = subparsers.add_parser('upload', help='Upload an archive')
    parser_upload.add_argument('vault')
    parser_upload.add_argument('filename')
    parser_upload.add_argument('--stdin',
                               help="Input data from stdin, instead of file",
                               action='store_true')
    parser_upload.add_argument(
        '--name',
        default=None,
        help='Use the given name as the filename for bookkeeping purposes. \
                               This option is useful in conjunction with --stdin \
                               or when the file being uploaded is a temporary file.'
    )
    parser_upload.add_argument('description', nargs='*')
    parser_upload.set_defaults(func=putarchive)

    parser_getarchive = subparsers.add_parser(
        'getarchive', help='Get a file by explicitly setting archive id')
    parser_getarchive.add_argument('vault')
    parser_getarchive.add_argument('archive')
    parser_getarchive.add_argument('filename', nargs='?')
    parser_getarchive.set_defaults(func=getarchive)

    parser_rmarchive = subparsers.add_parser('rmarchive',
                                             help='Remove archive')
    parser_rmarchive.add_argument('vault')
    parser_rmarchive.add_argument('archive')
    parser_rmarchive.set_defaults(func=deletearchive)

    parser_search = subparsers.add_parser(
        'search',
        help='Search SimpleDB database (if it was created). \
                      By default returns contents of vault.')
    parser_search.add_argument('--vault')
    parser_search.add_argument('--search_term')
    parser_search.set_defaults(func=search)

    parser_inventory = subparsers.add_parser('inventory',
                                             help='List inventory of a vault')
    parser_inventory.add_argument('--force',
                                  action='store_true',
                                  help="Create a new inventory job")
    parser_inventory.add_argument('vault')
    parser_inventory.set_defaults(func=inventory)

    parser_describevault = subparsers.add_parser('describevault',
                                                 help='Describe vault')
    parser_describevault.add_argument('vault')
    parser_describevault.set_defaults(func=describevault)

    parser_listmultiparts = subparsers.add_parser(
        'listmultiparts', help='List multipart uploads')
    parser_listmultiparts.add_argument('vault')
    parser_listmultiparts.set_defaults(func=listmultiparts)

    parser_abortmultipart = subparsers.add_parser(
        'abortmultipart', help='Abort multipart upload')
    parser_abortmultipart.add_argument('vault')
    parser_abortmultipart.add_argument('uploadId')
    parser_abortmultipart.set_defaults(func=abortmultipart)

    # bookkeeping required
    parser_download = subparsers.add_parser(
        'download',
        help='Download a file by searching through SimpleDB cache for it.')
    parser_download.add_argument(
        '--vault', help="Specify the vault in which archive is located.")
    parser_download.add_argument('--out-file')
    parser_download.add_argument('filename', nargs='?')
    parser_download.set_defaults(func=download)

    args = parser.parse_args(remaining_argv)
    args.func(args)
示例#33
0
def usage() -> argparse.Namespace:
    """Parse the options provided on the command line.

    Returns:
        argparse.Namespace: The parameters provided on the command line.
    """
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-h',
                        '--help',
                        action='store_true',
                        help='show this help message and exit')
    group = parser.add_argument_group("General", "Simulation general settings")
    group.add_argument("--first-date",
                       help="The first date to be processed. "
                       "Defaults to the current date",
                       type=datetime_type,
                       default=np.datetime64("now"))
    group.add_argument("--last-date",
                       help="The last date to be processed. "
                       "Defaults to the last date that allows covering an "
                       "entire cycle.",
                       type=datetime_type)
    group = parser.add_argument_group("Execution",
                                      "Runtime parameters options ")
    group.add_argument("--debug",
                       action="store_true",
                       help="Put swot simulator in debug mode")
    group.add_argument("--log",
                       metavar='PATH',
                       help="Path to the logbook to use",
                       type=argparse.FileType("w"))
    group.add_argument("--scheduler-file",
                       help="Path to a file with scheduler information to "
                       "launch swot simulator on a cluster. By "
                       "default, use a local cluster.",
                       metavar='PATH',
                       type=argparse.FileType("r"))
    group = parser.add_argument_group("LocalCluster",
                                      "Dask local cluster option")
    group.add_argument("--n-workers",
                       help="Number of workers to start (defaults to 1)",
                       type=int,
                       metavar='N',
                       default=1)
    group.add_argument("--processes",
                       help="Whether to use processes (True) or threads "
                       "(False). Defaults to False",
                       action="store_true")
    group.add_argument("--threads-per-worker",
                       help="Number of threads per worker "
                       "(defaults to 1)",
                       type=int,
                       metavar='N',
                       default=1)
    group = parser.add_argument_group("Configuration")
    group.add_argument("--template",
                       help="Writes the default configuration of the "
                       "simulator into the file and ends the program.",
                       metavar="PATH",
                       type=argparse.FileType("w"))
    namespace = argparse.Namespace()
    namespace, _ = parser._parse_known_args(sys.argv[1:], namespace)

    def add_settings(parser):
        """Add the argument defining the settings of the simulator."""
        parser.add_argument("settings",
                            type=argparse.FileType('r'),
                            help="Path to the parameters file")

    # Displays help and ends the program.
    if "help" in namespace:
        add_settings(parser)
        parser.print_help()
        parser.exit(0)

    # Checking exclusive options.
    if "scheduler_file" in namespace:
        for item in ["n_workers", "processes", "threads_per_worker"]:
            if item in namespace:
                item = item.replace("_", "-")
                raise RuntimeError(
                    f"--{item}: not allowed with argument --scheduler-file")

    # Write the template configuration file and ends the program
    if "template" in namespace:
        namespace.template.write(settings.template())
        sys.stdout.write(f"""
The template has been written in the file: {namespace.template.name!r}.
""")
        parser.exit(0)

    # The partial analysis of the command line arguments is finished; the last
    # argument is added and the command line is parsed one last time.
    add_settings(parser)

    return parser.parse_args()
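
Both date options rely on a datetime_type converter that is not shown in this example. One possible implementation, given how the values are used (this helper is an assumption, not the simulator's own code):

import argparse
import numpy as np

def datetime_type(value):
    """Assumed converter for --first-date/--last-date: parse the command
    line string into a numpy datetime64, reporting bad input via argparse."""
    try:
        return np.datetime64(value)
    except ValueError as err:
        raise argparse.ArgumentTypeError(str(err))
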
示例#34
0
def process_cli():
    """
    Build the CLI argument parser for checking domain expiration
    (a single domain, a file with a list of domains, notification options, etc.).
    :return: argparse.ArgumentParser
    """
    parser = argparse.ArgumentParser(
        description="""DNS Domain Expiration Checker
        A simple python script to display or notify a user by email and/or via Telegram
        about the status of the domain and the expiration date.
        """,
        epilog="(c) AK545 (Andrey Klimov) 2019, e-mail: [email protected]",
        add_help=False
    )
    parent_group = parser.add_argument_group(
        title="Options"
    )
    parent_group.add_argument(
        "-h",
        "--help",
        action="help",
        help="Help"
    )
    parent_group.add_argument(
        "-v",
        "--version",
        action="version",
        help="Display the version number",
        version="%(prog)s version: {}".format(__version__)
    )
    parent_group.add_argument(
        "-f",
        "--file",
        help="Path to the file with the list of domains (default is None)",
        metavar="FILE"
    )
    parent_group.add_argument(
        "-d",
        "--domain",
        help="Domain to check expiration on (default is None)",
        metavar="STRING"
    )
    parent_group.add_argument(
        "-c",
        "--print-to-console",
        action="store_true",
        default=False,
        help="Enable console printing (default is False)"
    )
    parent_group.add_argument(
        "-l",
        "--long-format",
        action="store_true",
        default=False,
        help="Enable detailed print in console (default is False)"
    )
    parent_group.add_argument(
        "-i",
        "--interval-time",
        default=60,
        type=int,
        metavar="SECONDS",
        help="Time to sleep between whois queries (in seconds, default is 60)"
    )
    parent_group.add_argument(
        "-x",
        "--expire-days",
        default=60,
        type=int,
        metavar="DAYS",
        help="Expiration threshold to check against (in days, default is 60)"
    )
    parent_group.add_argument(
        "-s",
        "--cost-per-domain",
        default=0.00,
        type=float,
        metavar="FLOAT",
        help="The cost per one domain (in your currency, default is 0.00)"
    )
    parent_group.add_argument(
        "-t",
        "--use-telegram",
        action="store_true",
        default=False,
        help="Send a warning message through Telegram (default is False)"
    )
    parent_group.add_argument(
        "-p",
        "--proxy",
        help="Proxy link (for Telegram only), for example: socks5://127.0.0.1:9150 (default is None)",
        metavar="URL"
    )
    parent_group.add_argument(
        "-e",
        "--email-to",
        help="Send a warning message to an email address (default is None)",
        metavar="EMAIL"
    )
    parent_group.add_argument(
        "-ssl",
        "--email-ssl",
        action="store_true",
        default=False,
        help="Send email via SSL (default is False)"
    )
    parent_group.add_argument(
        "-starttls",
        "--email-starttls",
        action="store_true",
        default=False,
        help="Send email via STARTTLS (default is False)"
    )
    parent_group.add_argument(
        "-oe",
        "--use-only-external-whois",
        action="store_true",
        default=False,
        help="Use only the external whois utility (default is False)"
    )
    parent_group.add_argument(
        "-ee",
        "--use-extra-external-whois",
        action="store_true",
        default=False,
        help="Use the external whois utility for additional analysis (default is False)"
    )
    parent_group.add_argument(
        "-nb",
        "--no-banner",
        action="store_true",
        default=False,
        help="Do not print banner (default is False)"
    )
    return parser
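
With the --expire-days threshold above, the core check reduces to comparing the domain's expiration date (obtained from whois data) with today's date. A hedged sketch of that comparison (the helper name and its inputs are assumptions):

from datetime import datetime

def is_expiring(expiration_date, expire_days):
    """Return (days_left, expiring) for a datetime taken from whois data and
    the --expire-days threshold. Illustrative only."""
    days_left = (expiration_date - datetime.now()).days
    return days_left, days_left <= expire_days
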
def main():
    # Usage: No arguments give basic statistics. A datetime as an
    # argument gives basic statistics and the number of songs played
    # before and after the given date. The switches -f (--from) and -t
    # (--to) DATE gives a list of the songs played on the date
    # interval given.

    # Parse the commandline.
    parser = argparse.ArgumentParser(description="Print statistics of the clementine database.")
    parser.add_argument(
        "-s",
        "--split",
        nargs=1,
        help=("List how many songs have been played before " "and after the given date."),
        metavar="DATE",
        action="store",
    )
    list_group = parser.add_argument_group(
        "List songs",
        "Print the songs played between the --from date and the --to date. "
        "If only one of the arguments is given, print either the interval "
        "[--from, --from + 1 day] or [--to - 1 day, --to].",
    )
    list_group.add_argument(
        "-f",
        "--from",
        nargs=1,
        help="Start date",
        metavar="DATE",
        action="store",
        dest="from_",  # This is needed because 'from' is a python keyword.
    )
    list_group.add_argument("-t", "--to", nargs=1, help="End date", metavar="DATE", action="store")

    args = parser.parse_args()

    # Create the database connection.
    with ClementineDb(DB_FILE) as conn:

        # Load the statics from the database and print it.
        conn.get_statistics()
        conn.print_statistics()

        # If the command line options '--from' and/or '--to' are given,
        # print a list of the songs that were played in the interval
        # [--from, --to]. If one of the arguments is missing, print the
        # interval [--from, --from + 1 day] or [--to - 1 day, --to].
        if args.from_:
            start_date = get_timestamp(args.from_)
            if start_date is not None:  # Do nothing on an invalid date.
                if args.to:
                    end_date = get_timestamp(args.to)
                else:
                    end_date = start_date + 86400  # +1 day
                if end_date is not None:  # Do nothing on an invalid date.
                    when = (start_date, end_date)
                    conn.get_songs_played_on_interval(when)
                    conn.print_song_list()
        elif args.to:
            # Only the end date has been given. Print the interval
            # [end date - 1 day, end date].
            end_date = get_timestamp(args.to)
            if end_date is not None:  # Do nothing on an invalid date.
                start_date = end_date - 86400  # -1 day
                when = (start_date, end_date)
                conn.get_songs_played_on_interval(when)
                conn.print_song_list()

        # If the command line option '--split' was given, print the
        # number of songs that were played before and after the
        # supplied date.
        if args.split:
            date = get_timestamp(args.split)
            if date is not None:  # Do nothing on an invalid date.
                conn.partition_songs(date)
                conn.print_partitions()
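
get_timestamp() is used above but not defined in this example; it has to turn the DATE argument (note nargs=1, so a one-element list) into a Unix timestamp, returning None for invalid input. One way it could look (an assumption, not the original helper):

import time

def get_timestamp(date_arg):
    """Assumed helper: convert a ['YYYY-MM-DD'] argument list into a Unix
    timestamp, or return None if the date cannot be parsed."""
    try:
        return time.mktime(time.strptime(date_arg[0], "%Y-%m-%d"))
    except (ValueError, IndexError):
        return None
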
def create_cli_parser():
    parser = argparse.ArgumentParser(
        description="Generates an email template for an Asana project", fromfile_prefix_chars="@"
    )
    parser.add_argument("project_id", help="the asana project id")
    parser.add_argument("api_key", help="your asana api key")
    parser.add_argument(
        "-i", "--skip-inline-css", action="store_false", default=True, help="skip inlining of CSS in rendered HTML"
    )
    parser.add_argument(
        "-c",
        "--completed",
        type=int,
        dest="completed_lookback_hours",
        metavar="HOURS",
        help="show non-archived tasks completed within the specified number of past hours",
    )
    parser.add_argument(
        "-f", "--filter-tags", nargs="+", dest="tag_filters", default=[], metavar="TAG", help="tags to filter tasks on"
    )
    parser.add_argument(
        "-s",
        "--filter-sections",
        nargs="+",
        dest="section_filters",
        default=[],
        metavar="SECTION",
        help="sections to filter tasks on",
    )
    parser.add_argument("--html-template", default="Default.html", help="a custom template to use for the html portion")
    parser.add_argument(
        "--text-template", default="Default.markdown", help="a custom template to use for the plaintext portion"
    )
    email_group = parser.add_argument_group("email", "arguments for sending emails")
    email_group.add_argument(
        "--mail-server",
        metavar="HOSTNAME",
        default="localhost",
        help="the hostname of the mail server to send email from " "(default: localhost)",
    )
    email_group.add_argument(
        "--to-addresses", nargs="+", metavar="ADDRESS", help="the 'To:' addresses for the outgoing email"
    )
    email_group.add_argument(
        "--cc-addresses", nargs="+", metavar="ADDRESS", help="the 'Cc:' addresses for the outgoing email"
    )
    email_group.add_argument("--from-address", metavar="ADDRESS", help="the 'From:' address for the outgoing email")
    email_group.add_argument(
        "--username",
        metavar="ADDRESS",
        default=None,
        help="the username to authenticate to the outgoing (SMTP) mail server over SSL",
    )
    email_group.add_argument(
        "--password",
        metavar="ADDRESS",
        default=None,
        help="the password to authenticate to the outgoing (SMTP) mail server over SSL",
    )

    return parser
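
The email group above supplies everything needed to hand the rendered templates to an SMTP server. A minimal sketch of that sending step with the standard library (using SMTP_SSL whenever a username is given is an assumption):

import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

def send_report(args, text_body, html_body):
    """Send the rendered plaintext/HTML bodies using the email_group options."""
    msg = MIMEMultipart('alternative')
    msg['Subject'] = 'Asana project report'
    msg['From'] = args.from_address
    msg['To'] = ', '.join(args.to_addresses or [])
    if args.cc_addresses:
        msg['Cc'] = ', '.join(args.cc_addresses)
    msg.attach(MIMEText(text_body, 'plain'))
    msg.attach(MIMEText(html_body, 'html'))

    if args.username:
        server = smtplib.SMTP_SSL(args.mail_server)
        server.login(args.username, args.password)
    else:
        server = smtplib.SMTP(args.mail_server)
    recipients = (args.to_addresses or []) + (args.cc_addresses or [])
    server.sendmail(args.from_address, recipients, msg.as_string())
    server.quit()
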
示例#37
0
def usage() -> argparse.Namespace:
    """Parse the options provided on the command line.

    Returns:
        argparse.Namespace: The parameters provided on the command line.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("settings",
                        type=argparse.FileType('r'),
                        help="Path to the parameters file")
    group = parser.add_argument_group("General", "Simulation general settings")
    group.add_argument("--first-date",
                       help="The first date to be processed. "
                       "Defaults to the current date",
                       type=datetime_type,
                       default=np.datetime64(datetime.date.today()))
    group.add_argument("--last-date",
                       help="The last date to be processed. "
                       "Defaults to the last date that allows covering an "
                       "entire cycle.",
                       type=datetime_type)
    group = parser.add_argument_group("Execution",
                                      "Runtime parameters options ")
    group.add_argument("--debug",
                       action="store_true",
                       help="Put swot simulator in debug mode")
    group.add_argument("--log",
                       metavar='PATH',
                       help="Path to the logbook to use",
                       type=argparse.FileType("w"))
    group.add_argument("--scheduler-file",
                       help="Path to a file with scheduler information to "
                       "launch swot simulator on a cluster. By "
                       "default, use a local cluster.",
                       metavar='PATH',
                       type=argparse.FileType("r"))
    group = parser.add_argument_group("LocalCluster",
                                      "Dask local cluster option")
    group.add_argument("--n-workers",
                       help="Number of workers to start (defaults to 1)",
                       type=int,
                       metavar='N',
                       default=1)
    group.add_argument("--processes",
                       help="Whether to use processes (True) or threads "
                       "(False). Defaults to False",
                       action="store_true")
    group.add_argument("--threads-per-worker",
                       help="Number of threads per worker "
                       "(defaults to 1)",
                       type=int,
                       metavar='N',
                       default=1)
    namespace = argparse.Namespace()
    namespace, _ = parser._parse_known_args(sys.argv[1:], namespace)
    if "scheduler_file" in namespace:
        for item in ["n_workers", "processes", "threads_per_worker"]:
            if item in namespace:
                item = item.replace("_", "-")
                raise RuntimeError(
                    f"--{item}: not allowed with argument --scheduler-file")
    return parser.parse_args()
示例#38
0
    """
    pool = mp.Pool(nprocs)
    for i in range(nprocs):
        pool.apply_async(plot_lc_table,
                         args=[flux_table, stats_table],
                         kwds={
                             'start': i,
                             'stride': nprocs
                         })
    pool.close()
    pool.join()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    group1 = parser.add_argument_group("Create a variability plot")
    group1.add_argument("--ftable",
                        dest='ftable',
                        type=str,
                        default=None,
                        help="flux table")
    group1.add_argument("--stable",
                        dest='stable',
                        type=str,
                        default=None,
                        help="stats table")
    group1.add_argument("--plot",
                        dest='plotfile',
                        type=str,
                        default=None,
                        help="output plot")
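
The pool fragment above fans plot_lc_table out over nprocs workers by giving each one a start offset and a stride, so the workers cover the table without overlapping. A sketch of how the strided loop inside such a worker typically looks (the body is illustrative, not the original function):

def plot_lc_table(flux_table, stats_table, start=0, stride=1):
    """Illustrative strided worker: handle every `stride`-th row of the
    table, beginning at `start`."""
    for i in range(start, len(flux_table), stride):
        # build and save the light curve for flux_table[i],
        # annotated with the matching row of stats_table
        pass
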
def main():
    program_description = u"""
    Command line interface for Amazon Glacier
    """

    # Config parser
    conf_parser = argparse.ArgumentParser(
                                formatter_class=argparse.RawDescriptionHelpFormatter,
                                add_help=False)

    conf_parser.add_argument("-c", "--conf", default=".glacier",
                        help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args()

    # Here we parse config from files in the home folder or in the current folder
    # We use separate sections for aws- and glacier-specific configs
    aws = glacier = {}
    config = ConfigParser.SafeConfigParser()
    if config.read([args.conf, os.path.expanduser('~/.glacier')]):
        try:
            aws = dict(config.items("aws"))
        except ConfigParser.NoSectionError:
            pass
        try:
            glacier = dict(config.items("glacier"))
        except ConfigParser.NoSectionError:
            pass

    # Join config options with environment variables
    aws = dict(os.environ.items() + aws.items())
    glacier = dict(os.environ.items() + glacier.items())

    # Helper functions
    filt_s= lambda x: x.lower().replace("_","-")
    filt = lambda x,y="": dict(((y+"-" if y not in filt_s(k) else "") +
                             filt_s(k), v) for (k, v) in x.iteritems())
    a_required = lambda x: x not in filt(aws,"aws")
    required = lambda x: x not in filt(glacier)
    a_default = lambda x: filt(aws, "aws").get(x)
    default = lambda x: filt(glacier).get(x)

    # Main parser
    parser = argparse.ArgumentParser(parents=[conf_parser],
                                     description=program_description)
    subparsers = parser.add_subparsers(title='Subcommands',
                                       help=u"For subcommand help, use: glacier <subcommand> -h")

    group = parser.add_argument_group('aws')
    help_msg_config = u"(Required if you haven't created .glacier config file)"
    group.add_argument('--aws-access-key',
                        required= a_required("aws-access-key"),
                        default= a_default("aws-access-key"),
                        help="Your aws access key " + help_msg_config)
    group.add_argument('--aws-secret-key',
                        required=a_required("aws-secret-key"),
                        default=a_default("aws-secret-key"),
                        help="Your aws secret key " + help_msg_config)
    group = parser.add_argument_group('glacier')
    group.add_argument('--region',
                        required=required("region"),
                        default=default("region"),
                        help="Region where glacier should take action " + help_msg_config)
    group.add_argument('--bookkeeping',
                        required= False,
                        default= default("bookkeeping") and True,
                        action= "store_true",
                        help="Should we keep a record of all created archives.\
                              This requires a SimpleDB account and its \
                              bookkeeping domain name set")
    group.add_argument('--bookkeeping-domain-name',
                        required= False,
                        default= default("bookkeeping-domain-name"),
                        help="SimpleDB domain name for bookkeeping.")

    parser_lsvault = subparsers.add_parser("lsvault", help="List vaults")
    parser_lsvault.set_defaults(func=lsvault)

    parser_mkvault = subparsers.add_parser("mkvault", help="Create a new vault")
    parser_mkvault.add_argument('vault')
    parser_mkvault.set_defaults(func=mkvault)

    parser_rmvault = subparsers.add_parser('rmvault', help='Remove vault')
    parser_rmvault.add_argument('vault')
    parser_rmvault.set_defaults(func=rmvault)

    parser_listjobs = subparsers.add_parser('listjobs', help='List jobs')
    parser_listjobs.add_argument('vault')
    parser_listjobs.set_defaults(func=listjobs)

    parser_describejob = subparsers.add_parser('describejob', help='Describe job')
    parser_describejob.add_argument('vault')
    parser_describejob.add_argument('jobid')
    parser_describejob.set_defaults(func=describejob)

    parser_upload = subparsers.add_parser('upload', help='Upload an archive')
    parser_upload.add_argument('vault')
    parser_upload.add_argument('filename')
    parser_upload.add_argument('--stdin',
                                help="Input data from stdin, instead of file",
                                action='store_true')
    parser_upload.add_argument('--name', default=None,
                                help='Use the given name as the filename for bookkeeping purposes. \
                               This option is useful in conjunction with --stdin \
                               or when the file being uploaded is a temporary file.')
    parser_upload.add_argument('description', nargs='*')
    parser_upload.set_defaults(func=putarchive)

    parser_getarchive = subparsers.add_parser('getarchive',
                help='Get a file by explicitly setting archive id')
    parser_getarchive.add_argument('vault')
    parser_getarchive.add_argument('archive')
    parser_getarchive.add_argument('filename', nargs='?')
    parser_getarchive.set_defaults(func=getarchive)

    parser_rmarchive = subparsers.add_parser('rmarchive', help='Remove archive')
    parser_rmarchive.add_argument('vault')
    parser_rmarchive.add_argument('archive')
    parser_rmarchive.set_defaults(func=deletearchive)

    parser_search = subparsers.add_parser('search',
                help='Search SimpleDB database (if it was created). \
                      By default returns contents of vault.')
    parser_search.add_argument('--vault')
    parser_search.add_argument('--search_term')
    parser_search.set_defaults(func=search)

    parser_inventory = subparsers.add_parser('inventory',
                help='List inventory of a vault')
    parser_inventory.add_argument('--force', action='store_true',
                                 help="Create a new inventory job")
    parser_inventory.add_argument('vault')
    parser_inventory.set_defaults(func=inventory)

    parser_describevault = subparsers.add_parser('describevault', help='Describe vault')
    parser_describevault.add_argument('vault')
    parser_describevault.set_defaults(func=describevault)

    parser_listmultiparts = subparsers.add_parser('listmultiparts', help='List multipart uploads')
    parser_listmultiparts.add_argument('vault')
    parser_listmultiparts.set_defaults(func=listmultiparts)

    parser_abortmultipart = subparsers.add_parser('abortmultipart', help='Abort multipart upload')
    parser_abortmultipart.add_argument('vault')
    parser_abortmultipart.add_argument('uploadId')
    parser_abortmultipart.set_defaults(func=abortmultipart)


    # bookkeeping required
    parser_download = subparsers.add_parser('download',
            help='Download a file by searching through SimpleDB cache for it.')
    parser_download.add_argument('--vault',
            help="Specify the vault in which archive is located.")
    parser_download.add_argument('--out-file')
    parser_download.add_argument('filename', nargs='?')
    parser_download.set_defaults(func=download)

    args = parser.parse_args(remaining_argv)
    args.func(args)
def main():
    program_description = u"""
    Command line interface for Amazon Glacier
    """

    # Config parser
    conf_parser = argparse.ArgumentParser(
                                formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                add_help=False)

    conf_parser.add_argument("-c", "--conf", default=".glacier",
                        help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args()

    # Here we parse config from files in the home folder or in the current folder
    # We use separate sections for aws- and glacier-specific configs
    aws = glacier = {}
    config = ConfigParser.SafeConfigParser()
    if config.read([args.conf, os.path.expanduser('~/.glacier')]):
        try:
            aws = dict(config.items("aws"))
        except ConfigParser.NoSectionError:
            pass
        try:
            glacier = dict(config.items("glacier"))
        except ConfigParser.NoSectionError:
            pass

    # Join config options with environment variables
    aws = dict(os.environ.items() + aws.items())
    glacier = dict(os.environ.items() + glacier.items())

    # Helper functions
    filt_s= lambda x: x.lower().replace("_","-")
    filt = lambda x,y="": dict(((y+"-" if y not in filt_s(k) else "") +
                             filt_s(k), v) for (k, v) in x.iteritems())
    a_required = lambda x: x not in filt(aws,"aws")
    required = lambda x: x not in filt(glacier)
    a_default = lambda x: filt(aws, "aws").get(x)
    default = lambda x: filt(glacier).get(x)

    # Main parser
    parser = argparse.ArgumentParser(parents=[conf_parser],
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                     description=program_description)
    subparsers = parser.add_subparsers(title='Subcommands',
                                       help=u"For subcommand help, use: glacier <subcommand> -h")

    group = parser.add_argument_group('aws')
    help_msg_config = u"(Required if you haven't created .glacier config file)"
    group.add_argument('--aws-access-key',
                        required= a_required("aws-access-key"),
                        default= a_default("aws-access-key"),
                        help="Your aws access key " + help_msg_config)
    group.add_argument('--aws-secret-key',
                        required=a_required("aws-secret-key"),
                        default=a_default("aws-secret-key"),
                        help="Your aws secret key " + help_msg_config)
    group = parser.add_argument_group('glacier')
    group.add_argument('--region',
                        required=required("region"),
                        default=default("region"),
                        help="Region where glacier should take action " + help_msg_config)
    group.add_argument('--bookkeeping',
                        required= False,
                        default= default("bookkeeping") and True,
                        action= "store_true",
                        help="Should we keep a record of all created archives.\
                              This requires a SimpleDB account and its \
                              bookkeeping domain name set")
    group.add_argument('--bookkeeping-domain-name',
                        required= False,
                        default= default("bookkeeping-domain-name"),
                        help="SimpleDB domain name for bookkeeping.")

    parser_lsvault = subparsers.add_parser("lsvault", help="List vaults")
    parser_lsvault.set_defaults(func=lsvault)

    parser_mkvault = subparsers.add_parser("mkvault", help="Create a new vault")
    parser_mkvault.add_argument('vault')
    parser_mkvault.set_defaults(func=mkvault)

    parser_rmvault = subparsers.add_parser('rmvault', help='Remove vault')
    parser_rmvault.add_argument('vault')
    parser_rmvault.set_defaults(func=rmvault)

    parser_listjobs = subparsers.add_parser('listjobs', help='List jobs')
    parser_listjobs.add_argument('vault')
    parser_listjobs.set_defaults(func=listjobs)

    parser_describejob = subparsers.add_parser('describejob', help='Describe job')
    parser_describejob.add_argument('vault')
    parser_describejob.add_argument('jobid')
    parser_describejob.set_defaults(func=describejob)

    parser_upload = subparsers.add_parser('upload', help='Upload an archive',
                               formatter_class=argparse.RawTextHelpFormatter)
    parser_upload.add_argument('vault')
    parser_upload.add_argument('filename')
    parser_upload.add_argument('--stdin',
                                help="Input data from stdin, instead of file",
                                action='store_true')
    parser_upload.add_argument('--name', default=None,
                                help='''\
Use the given name as the filename for bookkeeping
purposes. This option is useful in conjunction with
--stdin or when the file being uploaded is a
temporary file.''')
    parser_upload.add_argument('--partsize', type=int, default=-1,
                               help='''\
Part size to use for upload (in Mb). Must
be a power of 2 in the range:
    1 .. 4,294,967,296 (2^0 .. 2^32).
Values that are not a power of 2 will be
adjusted upwards to the next power of 2.

Amazon accepts up to 10,000 parts per upload.

Smaller parts result in more frequent progress
updates, and less bandwidth wasted if a part
needs to be re-transmitted. On the other hand,
smaller parts limit the size of the archive that
can be uploaded. Some examples:

partsize  MaxArchiveSize
    1        1*1024*1024*10000 ~= 10Gb
    4        4*1024*1024*10000 ~= 41Gb
   16       16*1024*1024*10000 ~= 137Gb
  128      128*1024*1024*10000 ~= 1.3Tb

By default, the smallest possible value is used
when the archive size is known ahead of time.
Otherwise (when reading from STDIN) a value of
128 is used.''')
    parser_upload.add_argument('description', nargs='*')
    parser_upload.set_defaults(func=putarchive)

    parser_getarchive = subparsers.add_parser('getarchive',
                help='Get a file by explicitly setting archive id')
    parser_getarchive.add_argument('vault')
    parser_getarchive.add_argument('archive')
    parser_getarchive.add_argument('filename', nargs='?')
    parser_getarchive.set_defaults(func=getarchive)

    parser_rmarchive = subparsers.add_parser('rmarchive', help='Remove archive')
    parser_rmarchive.add_argument('vault')
    parser_rmarchive.add_argument('archive')
    parser_rmarchive.set_defaults(func=deletearchive)

    parser_search = subparsers.add_parser('search',
                help='Search SimpleDB database (if it was created). '
                     'By default returns contents of vault.')
    parser_search.add_argument('--vault')
    parser_search.add_argument('--search_term')
    parser_search.set_defaults(func=search)

    parser_inventory = subparsers.add_parser('inventory',
                help='List inventory of a vault')
    parser_inventory.add_argument('--force', action='store_true',
                                 help="Create a new inventory job")
    parser_inventory.add_argument('vault')
    parser_inventory.set_defaults(func=inventory)

    parser_describevault = subparsers.add_parser('describevault', help='Describe vault')
    parser_describevault.add_argument('vault')
    parser_describevault.set_defaults(func=describevault)

    parser_listmultiparts = subparsers.add_parser('listmultiparts', help='List multipart uploads')
    parser_listmultiparts.add_argument('vault')
    parser_listmultiparts.set_defaults(func=listmultiparts)

    parser_abortmultipart = subparsers.add_parser('abortmultipart', help='Abort multipart upload')
    parser_abortmultipart.add_argument('vault')
    parser_abortmultipart.add_argument('uploadId')
    parser_abortmultipart.set_defaults(func=abortmultipart)


    # bookkeeping required
    parser_download = subparsers.add_parser('download',
            help='Download a file by searching through SimpleDB cache for it.')
    parser_download.add_argument('--vault',
            help="Specify the vault in which archive is located.")
    parser_download.add_argument('--out-file')
    parser_download.add_argument('filename', nargs='?')
    parser_download.set_defaults(func=download)

    args = parser.parse_args(remaining_argv)
    args.func(args)
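
Each subcommand above is bound to its handler with set_defaults(func=...) and dispatched through args.func(args). Below is a minimal, self-contained sketch of that dispatch pattern; the program name and handler bodies are invented for illustration and are not part of the tool above.

import argparse

def lsvault(args):
    # Stand-in handler: a real tool would call its backend API here.
    print("listing vaults")

def mkvault(args):
    print("creating vault", args.vault)

parser = argparse.ArgumentParser(prog="vaultctl")
subparsers = parser.add_subparsers(dest="command", required=True)

p_ls = subparsers.add_parser("lsvault", help="List vaults")
p_ls.set_defaults(func=lsvault)

p_mk = subparsers.add_parser("mkvault", help="Create a new vault")
p_mk.add_argument("vault")
p_mk.set_defaults(func=mkvault)

args = parser.parse_args(["mkvault", "backups"])
args.func(args)  # prints: creating vault backups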
Example #41
0
def parse_args():
    #Version
    parser = ArgumentParser(
        description=
        'Basecall, demultiplex and assemble reads from ONT sequencing')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='%(prog)s ' + 'v.1.0.0')

    #Argsgroups
    input_args = parser.add_argument_group('Input options (required)')
    output_args = parser.add_argument_group('Output options (required)')
    optional_args = parser.add_argument_group('Optional flags')
    advanced_args = parser.add_argument_group('Advanced options')

    #Input
    input_args.add_argument(
        '-i',
        '--input_dir',
        type=pathlib.Path,
        required=True,
        help=
        'Input directory, which will be recursively searched for fast5-files.')
    input_args.add_argument(
        '-b',
        '--basecalling_model',
        type=str,
        required=True,
        choices=["r9.4_fast", "r9.4_hac", "r9.5", "r10_fast", "r10_hac"],
        help=
        'Indicate which basecalling model to use. In most cases you probably want to use a HAC option.'
    )
    input_args.add_argument(
        '-k',
        '--barcode_kit',
        type=str,
        required=True,
        choices=[
            "none", "native_1-12", "native_13-24", "native_1-24", "rapid_1-12"
        ],
        help='Indicate which barcode-kits were used, if any.')

    #Output - currently writes to same dir as input
    output_args.add_argument('-o',
                             '--outdir',
                             type=pathlib.Path,
                             required=False,
                             default='.',
                             help='Output directory for all output files.')

    #Options
    optional_args.add_argument(
        '-f',
        '--filtlong',
        type=str,
        choices=["on", "off"],
        required=False,
        default="on",
        help='Subsample fastq-files with Filtlong? Default: on.')
    optional_args.add_argument(
        '-a',
        '--assemble',
        action='store_true',
        required=False,
        help=
        'Assemble the ONT-only fastq-files with Unicycler. Default: off.')
    optional_args.add_argument(
        '--resume',
        action='store_true',
        required=False,
        help=
        'Use this flag if your first run was interrupted and you want to resume. Default: off.'
    )
    optional_args.add_argument(
        '--cpu',
        action='store_true',
        required=False,
        help=
        'If GPU is busy, use CPU with this flag. Will use 4 threads and 6 callers. Default: GPU.'
    )

    #Advanced options
    advanced_args.add_argument(
        '--chunks_per_runner',
        type=str,
        required=False,
        help='Advanced option. Change chunks per runner. Default: 300.')

    return parser.parse_args()
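
The argument groups above only change how the --help output is organized; parsing still yields one flat namespace. A minimal sketch of that behavior, reusing a couple of the option names purely for illustration:

import argparse

parser = argparse.ArgumentParser(description="grouping demo")
inputs = parser.add_argument_group("Input options (required)")
inputs.add_argument("-i", "--input_dir", required=True)
optional = parser.add_argument_group("Optional flags")
optional.add_argument("--resume", action="store_true")

# Grouping affects help text only; every option lands on the same namespace.
args = parser.parse_args(["-i", "reads/", "--resume"])
print(args.input_dir, args.resume)  # reads/ True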
Example #42
0
def get_options():
    description = "This Python script is a wrapper for Exegol. It can be used to easily manage Exegol on your machine."

    examples = {
        "install (↓ ~8GB max):": "exegol install",
        "check image updates:": "exegol info",
        "get a shell:\t": "exegol start",
        "get a tmux shell:": "exegol --shell tmux start",
        "use wifi/bluetooth:": "exegol --privileged start",
        "use a proxmark:": "exegol --device /dev/ttyACM0 start",
        "use a LOGITacker:": "exegol --device /dev/ttyACM0 start",
        "use an ACR122u:": "exegol --device /dev/bus/usb/ start",
        "use a Crazyradio PA:": "exegol --device /dev/bus/usb/ start",
    }

    epilog = "{}Examples:{}\n".format(GREEN, END)
    for example in examples.keys():
        epilog += "  {}\t{}\n".format(example, examples[example])

    actions = {
        "start":
        "automatically start, resume, create or enter an Exegol container",
        "stop": "stop an Exegol container in a saved state",
        "install":
        "install Exegol image (build or pull depending on the chosen install --mode)",
        "update":
        "update Exegol image (build or pull depending on the chosen update --mode)",
        "remove": "remove Exegol image(s) and/or container(s)",
        "info":
        "print info on containers and local & remote images (name, size, state, ...)",
        "version": "print current version",
    }

    actions_help = ""
    for action in actions.keys():
        actions_help += "{}\t\t{}\n".format(action, actions[action])

    modes = {
        "release":
        "(default) downloads a pre-built image (from DockerHub) (faster)",
        "sources":
        "builds from the local sources in {} (pull from GitHub then docker build, local edits won't be overwritten)"
        .format(EXEGOL_PATH)
    }

    modes_help = ""
    for mode in modes.keys():
        modes_help += "{}\t\t{}\n".format(mode, modes[mode])

    parser = argparse.ArgumentParser(
        description=description,
        epilog=epilog,
        formatter_class=argparse.RawTextHelpFormatter,
    )

    # Required arguments
    parser._positionals.title = "{}Required arguments{}".format(
        "\033[1;32m", END)
    parser.add_argument("action", choices=actions.keys(), help=actions_help)
    parser.add_argument(
        "-k",
        "--insecure",
        dest="verify",
        action="store_false",
        default=True,
        required=False,
        help=
        "Allow insecure server connections for web requests (default: False)",
    )

    # Optional arguments
    parser._optionals.title = "{}Optional arguments{}".format(BLUE, END)
    logging = parser.add_mutually_exclusive_group()
    logging.add_argument(
        "-v",
        "--verbose",
        dest="verbosity",
        action="count",
        default=0,
        help="verbosity level (-v for verbose, -vv for debug)",
    )
    logging.add_argument(
        "-q",
        "--quiet",
        dest="quiet",
        action="store_true",
        default=False,
        help="show no information at all",
    )

    # Install/update options
    install_update = parser.add_argument_group(
        "{}Install/update options{}".format(BLUE, END))
    install_update.add_argument(
        "-m",
        "--mode",
        dest="mode",
        action="store",
        choices=modes.keys(),
        default="release",
        help=modes_help,
    )

    # Default start options
    default_start = parser.add_argument_group(
        "{}Default start options{}".format(BLUE, END),
        description=
        'The following options are enabled by default. They can all be disabled with the advanced option "--no-default" and then re-enabled individually, for example "exegol --no-default --X11 start"',
    )
    default_start.add_argument(
        "-x",
        "--X11",
        dest="X11",
        action="store_true",
        help="enable display sharing to run GUI-based applications",
    )
    default_start.add_argument(
        "--host-network",
        dest="host_network",
        action="store_true",
        help=
        "let the container share the host's networking namespace (the container shares the same interfaces and has the same adresses, needed for mitm6)",
    )
    default_start.add_argument(
        "--bind-resources",
        dest="bind_resources",
        action="store_true",
        help=
        "mount the /opt/resources of the container in a subdirectory of host\'s {}"
        .format(SHARED_RESOURCES))
    default_start.add_argument(
        "-s",
        "--shell",
        dest="shell",
        action="store",
        choices={"zsh", "bash", "tmux"},
        default="zsh",
        help="select shell to start when entering Exegol (Default: zsh)",
    )

    # Advanced start options
    advanced_start = parser.add_argument_group(
        "{}Advanced start/stop/reset options{}".format(BLUE, END))
    advanced_start.add_argument(
        "-t",
        "--container-tag",
        dest="containertag",
        action="store",
        help="tag to use in the container name",
    )
    advanced_start.add_argument(
        "--no-default",
        dest="no_default",
        action="store_true",
        default=False,
        help="disable the default start options (e.g. --X11, --host-network)",
    )
    advanced_start.add_argument(
        "--privileged",
        dest="privileged",
        action="store_true",
        default=False,
        help=
        "(dangerous) give extended privileges at the container creation (e.g. needed to mount things, to use wifi or bluetooth)",
    )
    advanced_start.add_argument(
        "-d",
        "--device",
        dest="device",
        action="store",
        help="add a host device at the container creation",
    )
    advanced_start.add_argument(
        "-c",
        "--custom-options",
        dest="custom_options",
        action="store",
        default="",
        help="specify custom options for the container creation",
    )
    advanced_start.add_argument(
        "-cwd",
        "--cwd-mount",
        dest="mount_current_dir",
        action="store_true",
        help="mount current dir to container's /workspace",
    )

    options = parser.parse_args()

    if not options.no_default:
        options.X11 = True
        options.host_network = True
        options.bind_resources = True
    options.action = options.action.replace("-", "")
    if options.action == "update":
        options.action = "install"
    return options
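
The -v/-q pair above sits in a mutually exclusive group, and action="count" lets repeated -v flags raise the verbosity level. A minimal sketch of that combination, independent of the Exegol code:

import argparse

parser = argparse.ArgumentParser()
log_opts = parser.add_mutually_exclusive_group()
log_opts.add_argument("-v", "--verbose", dest="verbosity", action="count",
                      default=0, help="-v for verbose, -vv for debug")
log_opts.add_argument("-q", "--quiet", action="store_true", default=False)

print(parser.parse_args(["-vv"]).verbosity)  # 2
print(parser.parse_args(["-q"]).quiet)       # True
# parser.parse_args(["-v", "-q"]) exits with a "not allowed with" usage error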
Example #43
0
    def set_config_options(usage):
        """
        Take command-line arguments and options from the configuration file.

        Command-line keyword arguments only, not positional -- breaking
        change November 2013.

        In case of conflict, command-line options are meant to override
        options specified in config file.
        """

        parser = ArgumentParser(description=info.DESCRIPTION)
        parser.add_argument('--version', action='version', version=info.VERSION)

        # General options
        parser.add_argument('-b', '--backend', dest='backend',
                            help='Backend used to fetch issues', default=None)
        parser.add_argument('--backend-user', dest='backend_user',
                            help='Backend user', default=None)
        parser.add_argument('--backend-password', dest='backend_password',
                            help='Backend password', default=None)
        parser.add_argument('--backend-token', dest='backend_token',
                            help='Backend authentication token', default=None)
        parser.add_argument('-c', '--cfg', dest='cfgfile',
                            help='Use a custom configuration file', default=None)
        parser.add_argument('-d', '--delay', type=int, dest='delay',
                            help='Delay in seconds between requests to avoid being banned',
                            default=5)
        parser.add_argument('-g', '--debug', action='store_true', dest='debug',
                            help='Enable debug mode', default=False)
        parser.add_argument('--gerrit-project', dest='gerrit_project',
                            help='Project to be analyzed (gerrit backend)',
                            default=None)
        parser.add_argument('-i', '--input', choices=['url', 'db'],
                            dest='input', help='Input format', default='url')
        parser.add_argument('-o', '--output', choices=['db'],
                            dest='output', help='Output format', default='db')
        parser.add_argument('-p', '--path', dest='path',
                            help='Path where downloaded URLs will be stored',
                            default=None)
        parser.add_argument('-u', '--url', dest='url',
                            help='URL to get issues from using the backend',
                            default=None)
        parser.add_argument('-l', '--logtable', action='store_true',
                            dest='logtable',
                            help='Enable generation of issues log table',
                            default=False)
        parser.add_argument('-n', '--num-issues', type=int, dest='nissues',
                            help='Number of issues requested on each query',
                            default=MAX_ISSUES_PER_QUERY)

        # Options for output database
        group = parser.add_argument_group('Output database specific options')
        group.add_argument('--db-driver-out',
                           choices=['sqlite', 'mysql', 'postgresql'],
                           dest='db_driver_out', help='Output database driver',
                           default='mysql')
        group.add_argument('--db-user-out', dest='db_user_out',
                           help='Database user name', default=None)
        group.add_argument('--db-password-out', dest='db_password_out',
                           help='Database user password', default=None)
        group.add_argument('--db-hostname-out', dest='db_hostname_out',
                           help='Name of the host where database server is running',
                           default='localhost')
        group.add_argument('--db-port-out', dest='db_port_out',
                           help='Port of the host where database server is running',
                           default='3306')
        group.add_argument('--db-database-out', dest='db_database_out',
                           help='Output database name', default=None)

        # Options for input database
        group = parser.add_argument_group('Input database specific options')
        group.add_argument('--db-driver-in',
                           choices=['sqlite', 'mysql', 'postgresql'],
                           dest='db_driver_in', help='Input database driver',
                           default=None)
        group.add_argument('--db-user-in', dest='db_user_in',
                           help='Database user name', default=None)
        group.add_argument('--db-password-in', dest='db_password_in',
                           help='Database user password', default=None)
        group.add_argument('--db-hostname-in', dest='db_hostname_in',
                           help='Name of the host where database server is running',
                           default=None)
        group.add_argument('--db-port-in', dest='db_port_in',
                           help='Port of the host where database server is running',
                           default=None)
        group.add_argument('--db-database-in', dest='db_database_in',
                           help='Input database name', default=None)

        # GitHub options
        group = parser.add_argument_group('GitHub specific options')
        group.add_argument('--newest-first', action='store_true', dest='newest_first',
                           help='Fetch newest issues first', default=False)

        # Maniphest options
        group = parser.add_mutually_exclusive_group()
        group.add_argument('--no-resume', action='store_true', dest='no_resume',
                           help='Disable resume mode (only on maniphest)', default=False)
        group.add_argument('--start-from', dest='start_from',
                           help='Do not retrieve issues after this date (only on maniphest)',
                           default=None)
        group.add_argument('--from-id', dest='from_id',
                           help='Retrieve issues in sequence from the given id (only on maniphest)',
                           default=None)

        args = parser.parse_args()

        if args.cfgfile is not None:  # if a config file was specified on the command line
            Config.load_from_file(args.cfgfile)  # try to load from that file
        else:
            Config.load()  # try to load a config file from default locations

        # Reconciling config file options with command-line options
        Config.__dict__.update(Config.clean_empty_options(args))
        Config.check_config()
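
The precedence rule stated in the docstring (command line overrides the config file) depends on discarding options left at their empty defaults before merging. A minimal sketch of that idea, using invented option names rather than the real Config/clean_empty_options helpers:

import argparse

# Pretend these values were loaded from a configuration file.
config = {"backend": "github", "delay": 5}

parser = argparse.ArgumentParser()
parser.add_argument("--backend", default=None)
parser.add_argument("--delay", type=int, default=None)
args = parser.parse_args(["--delay", "10"])

# Command-line options win, but only where a value was actually given.
overrides = {k: v for k, v in vars(args).items() if v is not None}
config.update(overrides)
print(config)  # {'backend': 'github', 'delay': 10}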
Example #44
0
    parser.add_argument(
        '-q',
        '--quiet',
        dest='verbose',
        action='store_false',
        help='''Reduce text output to the screen. (default: False)''',
    )
    parser.add_argument(
        '--notest',
        dest='test_settings',
        action='store_false',
        help='''Do not test USRP settings until experiment start.
                (default: False)''',
    )

    txgroup = parser.add_argument_group(title='receiver')
    txgroup.add_argument(
        '-m',
        '--mainboard',
        default=None,
        help='''Mainboard address. (default: first device found)''',
    )
    txgroup.add_argument(
        '-d',
        '--subdevice',
        default=None,
        help='''USRP subdevice string. (default: mainboard default)''',
    )

    txgroup.add_argument(
        '-f',
Example #45
0
def main() -> None:

    parser = argparse.ArgumentParser(
        description="Main server process for a scheduled Speedtest tester.")
    parser.add_argument('--version',
                        action='version',
                        version=f'Speedtest Manager Client {__version__}',
                        help="Display the current version and exit")
    parser.add_argument(
        '-d',
        '--datadir',
        default=DEFAULT_DATADIR,
        type=Path,
        help=
        "The directory where data is stored, used only to calculate relative paths for Unix sockets"
    )

    logging_args = parser.add_argument_group(
        'Logging Settings',
        description="Arguments that controls logging output.")

    loglevel_group = logging_args.add_mutually_exclusive_group(required=False)
    loglevel_group.add_argument('-l',
                                '--loglevel',
                                type=str,
                                choices=LOGLEVELS,
                                dest='loglevel',
                                help="The level of logging to use.")
    loglevel_group.add_argument(
        '-v',
        '--verbose',
        action='store_const',
        const=VERBOSE_LOGLEVEL,
        dest='loglevel',
        help=
        f"Output all logging. This is equivalent to --loglevel {VERBOSE_LOGLEVEL}."
    )

    network_args = parser.add_argument_group(
        'Network Settings',
        description=
        "Arguments that control how the client connects to the manager.")

    family_group = network_args.add_mutually_exclusive_group(required=False)
    family_group.add_argument('-u',
                              '--unix',
                              action='store_const',
                              dest='family',
                              const=socket.AF_UNIX,
                              help="Uses a UNIX socket for connections")
    family_group.add_argument('-4',
                              '--ipv4',
                              action='store_const',
                              dest='family',
                              const=socket.AF_INET,
                              help="Uses an IPV4 socket for connections")
    family_group.add_argument('-6',
                              '--ipv6',
                              action='store_const',
                              dest='family',
                              const=socket.AF_INET6,
                              help="Uses an IPV6 socket for connections")

    network_args.add_argument('-a',
                              '--host',
                              default=None,
                              type=str,
                              help="The address to connect to")
    network_args.add_argument('-p',
                              '--port',
                              default=8090,
                              type=int,
                              help="The port to connect to")

    parser.set_defaults(family=socket.AF_UNIX, loglevel=DEFAULT_LOGLEVEL)

    subparsers = parser.add_subparsers(
        required=True,
        dest='operation',
        title="Operations",
        description="Operations that can be performed in the manager.")

    ##### New job

    new_job_parser = subparsers.add_parser(
        'new',
        help="Creates a new job",
        description="Creates a new job in the system with the given parameters."
    )

    new_job_parser.add_argument('id', type=str, help="The ID of the job")

    new_job_parser.add_argument(
        '-t',
        '--title',
        type=str,
        help="The title of the job (purely for readability purposes)")
    new_job_parser.add_argument(
        '-i',
        '--interval',
        type=parse_interval,
        default=None,
        help=
        "The interval between job executions. If not specified, the job is only ran once, and the end time is ignored."
    )
    new_job_parser.add_argument(
        '-s',
        '--start',
        type=parse_time,
        default=None,
        help="When to start the job. If not specified, starts immediately.")
    new_job_parser.add_argument(
        '-e',
        '--end',
        type=parse_time,
        default=None,
        help=
        "When to stop the job. If not specified, the job will run until manually stopped."
    )

    server_id_group = new_job_parser.add_mutually_exclusive_group(
        required=True)
    server_id_group.add_argument(
        '--server-id',
        type=int,
        default=None,
        help="The ID of the server to use for the job")
    server_id_group.add_argument(
        '--server-name',
        type=str,
        default=None,
        help="The hostname of the server to use for the job")

    def new_job(client: ManagerClient, args) -> None:

        job = Job(
            id=args.id,
            title=args.title,
            server_id=args.server_id,
            server_name=args.server_name,
            interval=args.interval,
            start=args.start,
            end=args.end,
        )
        id = client.new_job(job)

        print(f"Created job with ID '{id}'.")

    new_job_parser.set_defaults(func=new_job)

    ##### Get single job

    get_job_parser = subparsers.add_parser(
        'job',
        help="Describe a job",
        description="Retrieves information about a single job.")

    get_job_parser.add_argument('id', type=str, help='The ID of the job')

    def get_job(client: ManagerClient, args) -> None:

        job: Job = client.get_job(args.id)
        print(json.dumps(job.to_json()))

    get_job_parser.set_defaults(func=get_job)

    ##### Get job list

    get_jobs_parser = subparsers.add_parser(
        'jobs',
        help="List registered jobs",
        description="Retrieves information about all registered job.")

    get_job_filter_group = get_jobs_parser.add_mutually_exclusive_group(
        required=False)

    get_job_filter_group.add_argument('-r',
                                      '--running',
                                      action='store_const',
                                      const=True,
                                      dest='running',
                                      help="Only retrieve running jobs")
    get_job_filter_group.add_argument('-s',
                                      '--stopped',
                                      action='store_const',
                                      const=False,
                                      dest='running',
                                      help="Only retrieve stopped jobs")

    def get_jobs(client: ManagerClient, args) -> None:

        jobs: Set[Job] = client.get_jobs(args.running)
        print(json.dumps({'jobs': [job.to_json() for job in jobs]}))

    get_jobs_parser.set_defaults(func=get_jobs, running=None)

    ##### Stop job

    stop_job_parser = subparsers.add_parser(
        'stop',
        help="Stops a running job",
        description="Stops a running job before its scheduled finish date.")

    stop_job_parser.add_argument('id', type=str, help='The ID of the job')

    def stop_job(client: ManagerClient, args) -> None:

        job: Job = client.stop_job(args.id)
        print(json.dumps(job.to_json()))

    stop_job_parser.set_defaults(func=stop_job)

    ##### Delete job

    delete_job_parser = subparsers.add_parser(
        'delete',
        help="Deletes a job",
        description="Deletes a job from the system, including its results.")

    delete_job_parser.add_argument('id', type=str, help='The ID of the job')

    def delete_job(client: ManagerClient, args) -> None:

        job: Job = client.delete_job(args.id)
        print(json.dumps(job.to_json()))

    delete_job_parser.set_defaults(func=delete_job)

    ##### Get results

    get_results_parser = subparsers.add_parser(
        'results',
        help="Retrieves job results",
        description="Retrieves the results obtained so far by registered jobs."
    )

    get_results_parser.add_argument(
        'id',
        type=str,
        nargs='+',
        help='The ID of each job to get results for')

    def get_results(client: ManagerClient, args) -> None:

        results = client.get_results(args.id)
        print(json.dumps(results))

    get_results_parser.set_defaults(func=get_results)

    ##### Run program

    args = parser.parse_args()

    datadir: Path = args.datadir

    loglevel: int = getattr(logging, args.loglevel)
    setup_logging(True, logdir=None, level=loglevel)

    logging.info("Program starting.")

    family: int = args.family
    host: str = args.host
    port: int = args.port
    if family == socket.AF_UNIX:
        address = str(datadir / (host if host is not None else 'server.sock'))
    elif family == socket.AF_INET:
        address = (host if host is not None else '127.0.0.1', port)
    elif family == socket.AF_INET6:
        address = (host if host is not None else '::1', port)
    else:
        raise ValueError("Unsupported address family.")

    client = ManagerClient(family, address)
    try:
        args.func(client, args)
    except SpeedtestError as e:
        print(f"Error: {e}")
Example #46
0
def main():
    parser = argparse.ArgumentParser()

    parser.add_argument("-u",
                        "--update",
                        action="store_true",
                        help="update battery level")
    parser.add_argument("-l",
                        "--list",
                        action="store_true",
                        help="list devices")
    parser.add_argument("-d",
                        "--daemon",
                        action="store_true",
                        help="run daemon")

    group = parser.add_argument_group(
        description=
        "Use these options to remotely enable/disable bluetooth or wifi or to ring a device."
        +
        " Pass either the device_id or objectId (see -l) or the device label, if it is unique."
        + " These options can be specified multiple times.")
    group.add_argument("-r",
                       "--ring",
                       action="append",
                       metavar="DEV",
                       help="ring device")
    group.add_argument("-b",
                       "--disable-bluetooth",
                       action="append",
                       metavar="DEV",
                       help="disable bluetooth")
    group.add_argument("-B",
                       "--enable-bluetooth",
                       action="append",
                       metavar="DEV",
                       help="enable bluetooth")
    group.add_argument("-w",
                       "--disable-wifi",
                       action="append",
                       metavar="DEV",
                       help="disable wifi")
    group.add_argument("-W",
                       "--enable-wifi",
                       action="append",
                       metavar="DEV",
                       help="enable wifi")

    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help=
        "verbose, machine-readable mode. each output line can be parsed as JSON"
    )

    args = parser.parse_args()

    try:
        p = pytential.Pytential()
    except pytential.LoginError:
        if args.daemon:
            raise

        username = input("E-Mail: ")
        password = getpass.getpass()
        pytential.login(username, password)

        p = pytential.Pytential()

    if args.update:
        if p.is_registered():
            result = p.update()
            if args.verbose:
                print(json.dumps({
                    "status": "updated",
                    "result": result,
                }))
        else:
            result = p.register()
            if args.verbose:
                print(json.dumps({
                    "status": "registered",
                    "result": result,
                }))

    if args.list:
        devices = p.get_devices()

        if args.verbose:
            print(json.dumps({
                "status": "devices",
                "result": devices,
            }))
        else:
            print("Devices:")
            for device in p.get_devices():
                d = {
                    "manufacturer_name": "\033[33m<none>\033[0m",
                    "model_number": "\033[33m<none>\033[0m",
                }
                d.update(device)
                d["updatedAtF"] = format_time(d["updatedAt"])

                d["wifi_f"] = d[
                    "wifi_state"] and "\033[2;32m⌔ Enabled" or u"\033[2;31m\u0338⌔ Disabled"
                d["bluetooth_f"] = d[
                    "bluetooth_state"] and "\033[34mꔪ Enabled" or u"\033[33m\u0338ꔪ Disabled"

                d["state_c"] = "\033[2;33m"
                if d["state"].lower() in ("charging", "fully charged",
                                          "pending charge"):
                    d["state_c"] = "\033[2;32m"
                elif d["state"].lower() in ("discharging", "empty",
                                            "pending discharge"):
                    d["state_c"] = "\033[2;31m"

                level_colors = ("2;31", "0;31", "0;33", "2;32", "0;32")
                d["level_c"] = level_colors[round(d["value"] / 100.0 *
                                                  (len(level_colors) - 1))]

                print("""
  Name:      %(name)-20s\
  Vendor:    %(manufacturer_name)s
  Type:      %(device_type)-20s\
  Model:     %(model_number)s
  Device ID: %(device_id)-20s\
  Object ID: %(objectId)s
  Battery:   \033[%(level_c)sm%(value)3d%%\033[0m                \
  Wi-Fi:     %(wifi_f)s\033[0m
  State:     %(state_c)s%(state)-20s\033[0m\
  Bluetooth: %(bluetooth_f)s\033[0m
  Updated:   %(updatedAtF)s
  """.rstrip() % d)

    if args.ring:
        for dev in args.ring:
            result = p.ring_device(dev)
            if args.verbose:
                print(
                    json.dumps({
                        "status": "remote_control",
                        "device": dev,
                        "feature": "ring",
                        "result": result,
                    }))
            elif result:
                print("Ringed device %s" % dev)
            else:
                print("Failed to ring on %s" % dev)

    if args.enable_bluetooth:
        for dev in args.enable_bluetooth:
            result = p.remote_control(dev, "BT", True)
            if args.verbose:
                print(
                    json.dumps({
                        "status": "remote_control",
                        "device": dev,
                        "feature": "bt",
                        "enable": True,
                        "result": result,
                    }))
            elif result:
                print("Bluetooth enabled on %s" % dev)
            else:
                print("Failed to enable bluetooth on %s" % dev)

    if args.disable_bluetooth:
        for dev in args.disable_bluetooth:
            result = p.remote_control(dev, "BT", False)
            if args.verbose:
                print(
                    json.dumps({
                        "status": "remote_control",
                        "device": dev,
                        "feature": "bt",
                        "enable": False,
                        "result": result,
                    }))
            elif result:
                print("Bluetooth disabled on %s" % dev)
            else:
                print("Failed to disable bluetooth on %s" % dev)

    if args.enable_wifi:
        for dev in args.enable_wifi:
            result = p.remote_control(dev, "wifi", True)
            if args.verbose:
                print(
                    json.dumps({
                        "status": "remote_control",
                        "device": dev,
                        "feature": "wifi",
                        "enable": True,
                        "result": result,
                    }))
            elif result:
                print("Wifi enabled on %s" % dev)
            else:
                print("Failed to enable Wifi on %s" % dev)

    if args.disable_wifi:
        for dev in args.disable_wifi:
            result = p.remote_control(dev, "WIFI", False)
            if args.verbose:
                print(
                    json.dumps({
                        "status": "remote_control",
                        "device": dev,
                        "feature": "wifi",
                        "enable": False,
                        "result": result,
                    }))
            elif result:
                print("WiFi disabled on %s" % dev)
            else:
                print("Failed to disable WiFi on %s" % dev)

    if args.daemon:

        def on_property_change(interface, changed, removed):
            if "Percentage" in changed or "State" in changed:
                if args.verbose:
                    print(
                        json.dumps({
                            "status":
                            "changed",
                            "percentage":
                            upower.battery.get_percentage(),
                            "state":
                            upower.battery.get_state(),
                        }))
                result = p.update()
                if args.verbose:
                    print(json.dumps({
                        "status": "updated",
                        "result": result,
                    }))

        upower.battery.add_property_handler(on_property_change)
        upower.loop()
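
The remote-control flags above use action="append", so each repetition of the option adds another device to a list, and the attribute stays None when the flag is never given (which is why the code guards with "if args.ring:" before looping). A minimal sketch:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-r", "--ring", action="append", metavar="DEV",
                    help="ring device (may be given multiple times)")

print(parser.parse_args([]).ring)  # None (flag never used)
print(parser.parse_args(["-r", "phone", "-r", "tablet"]).ring)
# ['phone', 'tablet']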