Code example #1
def parse_and_set_args(block):
    args = parser.parse_args()

    torch.backends.cudnn.benchmark = True
    block.log("Enabling torch.backends.cudnn.benchmark")

    if args.resume != '':
        block.log("Setting initial eval to true since checkpoint is provided")
        args.initial_eval = True

    args.rank = int(os.getenv('RANK', 0))
    args.world_size = int(os.getenv("WORLD_SIZE", 1))

    if args.local_rank:
        args.rank = args.local_rank
    if args.local_rank is not None and args.local_rank != 0:
        utils.block_print()

    block.log("Creating save directory: {}".format(
        os.path.join(args.save, args.name)))
    args.save_root = os.path.join(args.save, args.name)
    os.makedirs(args.save_root, exist_ok=True)
    assert os.path.exists(args.save_root)

    # temporary directory for torch pre-trained models
    os.makedirs(args.torch_home, exist_ok=True)
    os.environ['TORCH_HOME'] = args.torch_home

    defaults, input_arguments = {}, {}
    for key in vars(args):
        defaults[key] = parser.get_default(key)

    for argument, value in sorted(vars(args).items()):
        if value != defaults[argument] and argument in vars(
                parser.parse_args()).keys():
            input_arguments['--' + str(argument)] = value
            block.log('{}: {}'.format(argument, value))

    if args.rank == 0:
        utils.copy_arguments(input_arguments, os.path.realpath(__file__),
                             args.save_root)

    args.network_class = utils.module_to_dict(models)[args.model]
    args.optimizer_class = utils.module_to_dict(torch.optim)[args.optimizer]
    args.dataset_class = utils.module_to_dict(datasets)[args.dataset]

    if args.backbone != 'none':
        args.backbone = utils.module_to_dict(backbone_models)[args.backbone]
    else:
        args.backbone = None

    return args
Code example #2
File: fasp2smt.py Project: alviano/python
def parseArguments():
    global VERSION
    global GPL
    global args
    parser = argparse.ArgumentParser(description=GPL.split("\n")[1], epilog="Copyright (C) 2015  Mario Alviano ([email protected])")
    parser.add_argument('--help-syntax', action='store_true', help='print syntax description and exit') 
    parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + VERSION, help='print version number')
    parser.add_argument('-g', '--grounder', metavar='<grounder>', type=str, help='path to the gringo 4.5 or higher (default \'gringo\')', default='gringo')
    parser.add_argument('-s', '--solver', metavar='<solver>', type=str, help='path to the SMT solver (default \'z3\')', default='z3')
    parser.add_argument('--print-grounder-input', action='store_true', help='print the input of the grounder')
    parser.add_argument('--print-grounder-output', action='store_true', help='print the output of the grounder')
    parser.add_argument('--print-smt-input', action='store_true', help='print the input of the SMT solver')
    parser.add_argument('--print-smt-output', action='store_true', help='print the output of the SMT solver')
    parser.add_argument('-o', '--optimize-definedness', metavar='<strategy>', help='prefer more defined fuzzy answer sets; set optimization strategy: none (default), maximize, binary-search, progression, any', default='none')
    parser.add_argument('-p', '--precision', metavar='<epsilon>', type=float, help='precision required in definedness', default=0.01)
    parser.add_argument('args', metavar="...", nargs=argparse.REMAINDER, help="input files, and arguments for <grounder>")
    args = parser.parse_args()

    assert args.optimize_definedness in ['none', 'maximize', 'binary-search', 'progression', 'any']

    args.files = []
    args.grounder_args = []
    for arg in args.args:
        if os.path.isfile(arg) or arg == "/dev/stdin": args.files.append(arg)
        else: args.grounder_args.append(arg)
    if args.help_syntax: helpSyntax()
Code example #3
    def get(self):
        args = parser.parse_args()

        db_sess = db_session.create_session()
        jobs = db_sess.query(Jobs).get(args["job_id"])
        if not jobs:
            return jsonify({"error": "Not found"})
        return jsonify({'jobs': jobs.to_dict()})
Code example #4
    def get(self):
        args = parser.parse_args()
        user_id = args['user_id']

        abort_if_user_not_found(user_id)

        db_sess = db_session.create_session()
        users = db_sess.query(User).get(user_id)
        return jsonify({'users': users.to_dict()})
Code example #5
File: change-epg.py Project: seomin3/sandbox
def argparsers():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--project_id")
    parser.add_argument("--create_network", action="store_true")
    parser.add_argument("--delete_network", action="store_true")
    parser.add_argument("--attach_interface", action="store_true")
    parser.add_argument("--detach_interface", action="store_true")
    args = parser.parse_args()
    return args
Code example #6
    def delete(self):
        args = parser.parse_args()

        db_sess = db_session.create_session()
        jobs = db_sess.query(Jobs).get(args["job_id"])

        if not jobs:
            return jsonify({'error': 'Not found'})
        db_sess.delete(jobs)
        db_sess.commit()
        return jsonify({'success': 'OK'})
Code example #7
    def delete(self):
        args = parser.parse_args()
        user_id = args['user_id']

        abort_if_user_not_found(user_id)

        db_sess = db_session.create_session()
        user = db_sess.query(User).get(user_id)

        db_sess.delete(user)
        db_sess.commit()
        return jsonify({'success': 'OK'})
Code example #8
    def post(self):
        args = parser.parse_args()

        if not args:
            return jsonify({"error": "Empty request"})
        elif not all(key in args for key in USER_FIELDS):
            return jsonify({"error": "Bad request"})

        db_sess = db_session.create_session()
        user = User(**args)
        db_sess.add(user)
        db_sess.commit()
        return jsonify({"success": "OK"})
Code example #9
File: wall.py Project: 0x0d/wallofshame
        def load_config(self):
                parser = optparse.OptionParser()
                parser.add_option('-i', '--iface', dest='listen_interface', default='mon0', help='Interface to listen')
                parser.add_option('-p', '--pcap', dest='pcap_file', default='None', help='Pcap file to read')
                parser.add_option('--filter', dest='filter', default='tcp dst port 80 or tcp dst port 8080 or tcp dst port 3128 or tcp dst port 5190 or tcp dst port 110 or tcp dst port 25 or tcp dst port 2041 or tcp dst port 21 or tcp dst port 143', help='Tcpdump filter for password sniff')
                parser.add_option('--p0f-filter', dest='p0f_filter', default='tcp dst port 80 and tcp[tcpflags] & tcp-syn == tcp-syn', help='Tcpdump filter for p0f OS fingerprint')
                parser.add_option('--db-host', dest='db_host', default='localhost', help='Database host')
                parser.add_option('--db-user', dest='db_user', default='root', help='Database user')
                parser.add_option('--db-password', dest='db_password', default='', help='Database password')
                parser.add_option('--db-database', dest='db_database', default='wall', help='Database name')
                parser.add_option('--tcp_timeout', dest='tcp_assemble_timeout', type='int', default='10', help='TCP stream reassemble timeout')

                self.options = parser.parse_args()[0]
Code example #10
File: generate.py Project: th133/append-markdown
def parse_user_input_and_get_format():
  user_input = parser.parse_args()
  user_input = vars(user_input)

  check = [abspath(path) for path in user_input["inputs"]]
  all_files = get_all_files(check, user_input["types"], user_input["recurse"])
  
  # Initializing output dictionary
  formats = {}
  for file in all_files:
    grab_formats(file, formats)

  # Output based on --outform
  output[user_input["outtype"]](formats = formats, output_format = user_input["outform"])
Code example #11
File: user_resource.py Project: MaxFunProger/app
    def post(self):
        args = parser.parse_args()
        session = db_session.create_session()
        users = User(
            surname=args['surname'],
            name=args['name'],
            user_id=args['user_id'],
            position=args['position'],
            speciality=args['speciality'],
            hashed_password=args['hashed_password']
        )
        session.add(users)
        session.commit()
        return jsonify({'success': 'OK'})
Code example #12
    def post(self):
        args = parser.parse_args()

        if not args:
            return jsonify({"error": "Empty request"})
        elif not all(key in args for key in [
                'id', 'team_leader', 'job', 'work_size', 'collaborators',
                'start_date', 'end_date', 'is_finished'
        ]):
            return jsonify({"error": "Bad request"})

        db_sess = db_session.create_session()
        jobs = Jobs(**args)
        db_sess.add(jobs)
        db_sess.commit()
        return jsonify({"success": "OK"})
Code example #13
    def put(self):
        args = parser.parse_args()
        user_id = args['user_id']

        if not args:
            return jsonify({"error": "Empty request"})
        elif not all(key in args for key in USER_FIELDS):
            return jsonify({"error": "Bad request"})

        abort_if_user_not_found(user_id)

        db_sess = db_session.create_session()
        user = db_sess.query(User).get(user_id)

        user.update(args)
        db_sess.commit()
        return jsonify({'success': 'OK'})
Code example #14
    def load_config(self):
        parser = optparse.OptionParser()
        parser.add_option('-i',
                          '--iface',
                          dest='listen_interface',
                          default='mon0',
                          help='Interface to listen')
        parser.add_option('-p',
                          '--pcap',
                          dest='pcap_file',
                          default='None',
                          help='Pcap file to read')
        parser.add_option(
            '--filter',
            dest='filter',
            default=
            'tcp dst port 80 or tcp dst port 8080 or tcp dst port 3128 or tcp dst port 5190 or tcp dst port 110 or tcp dst port 25 or tcp dst port 2041 or tcp dst port 21 or tcp dst port 143',
            help='Tcpdump filter for password sniff')
        parser.add_option(
            '--p0f-filter',
            dest='p0f_filter',
            default='tcp dst port 80 and tcp[tcpflags] & tcp-syn == tcp-syn',
            help='Tcpdump filter for p0f OS fingerprint')
        parser.add_option('--db-host',
                          dest='db_host',
                          default='localhost',
                          help='Database host')
        parser.add_option('--db-user',
                          dest='db_user',
                          default='root',
                          help='Database user')
        parser.add_option('--db-password',
                          dest='db_password',
                          default='',
                          help='Database password')
        parser.add_option('--db-database',
                          dest='db_database',
                          default='wall',
                          help='Database name')
        parser.add_option('--tcp_timeout',
                          dest='tcp_assemble_timeout',
                          type='int',
                          default='10',
                          help='TCP stream reassemble timeout')

        self.options = parser.parse_args()[0]
Code example #15
def main():
    args = parser.parse_args()

    try:
        config = build_config(args)
        pdf_modifier = PdfModifier(config)
        pdf_modifier.execute()

        sys.exit(0)
    except Exception as e:
        if args.verbose:
            print("{:=^30}".format(" Stack Trace"))
            traceback.print_exc()
        else:
            t, v, tb = sys.exc_info()
            print("%s\n", v)
            sys.exit(1)
Code example #16
    def put(self):
        args = parser.parse_args()

        if not args:
            return jsonify({"error": "Empty request"})
        elif not all(key in args for key in [
                'id', 'team_leader', 'job', 'work_size', 'collaborators',
                'start_date', 'end_date', 'is_finished'
        ]):
            return jsonify({"error": "Bad request"})

        db_sess = db_session.create_session()
        jobs = db_sess.query(Jobs).get(args["job_id"])

        if not jobs:
            return jsonify({'error': 'Not found'})

        jobs.update(args)
        db_sess.commit()
        return jsonify({'success': 'OK'})
Code example #17
import random
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
from utils import *
from glob import glob
from functools import reduce
from collections import defaultdict
from focal_loss import BinaryFocalLoss
from TrackNet import ResNet_Track
from tensorflow import keras
from parser import parser
from tensorflow.keras import backend as K

args = parser.parse_args()
tol = args.tol
save_weights = args.save_weights
HEIGHT = args.HEIGHT
WIDTH = args.WIDTH
BATCH_SIZE = args.batch_size
FRAME_STACK = args.frame_stack
pre_trained = args.pre_trained

optimizer = keras.optimizers.Adadelta(lr=args.lr)
if not pre_trained:
    model = ResNet_Track(input_shape=(FRAME_STACK, HEIGHT, WIDTH))
    model.compile(loss=BinaryFocalLoss(gamma=2),
                  optimizer=optimizer,
                  metrics=[keras.metrics.BinaryAccuracy()])
else:
Code example #18
import sys

import commands  # noqa
from parser import parser


if len(sys.argv) < 2:
    args = parser.parse_args(['--help'])
else:
    args = parser.parse_args()

# Runs the command and exits the script with the return-code of the command
exit(args.func(args))
Code example #19
import sys

import commands  # noqa
from parser import parser

if len(sys.argv) < 2:
    args = parser.parse_args(["--help"])
else:
    args = parser.parse_args()

# Runs the command and exits the script with the return-code of the command
exit(args.func(args))
Code example #20
'''__DEBUG_ARGS = ['--api_key', '2fbborrg7dwqegs40t4gacfg14', 
                '--db_address','http://66.172.13.75/pas/pasql/select?s=select%20%2A%20from%20proj_master%20where%20started%3E1380000000&db=asi.db', '--db_encoding', 'utf-8',
                '--sheet_id', '5835894021220228',  
                '--column_id', '1294997367613316']'''

_LOG_FORMAT = '''%(asctime)-15s: %(message)s \n \n \
    Old Options: %(old_options)s \n \n  \
    New Options: %(new_options)s \n \n \n'''

if __name__ == '__main__':
    from AqProjectUpdater import AqProjectUpdater
    from parser import parser
    import signal # Persistent python process for scheduling
    
    if __DEBUG:
        parser.parse_args(args = sys.argv[1:] + __DEBUG_ARGS)
    else:
        parser.parse_args()
        
    settings = parser.values.__dict__
    
    # Import SmartSheet stuff
    sys.path.insert(0, settings.pop('smartsheet_path'))
    from SmartSheet.SmartObjects import *
    from SmartSheet.SmartSocket import SmartSocket
    from SmartSheet.util import align
    
    # Set up directories
    home_dir = os.path.join(settings.pop('home_dir'), 'AqProjectUpdater-files')
    work_dir = os.path.join(home_dir, 'temp')
    
Code example #21
def parseArguments():
    global VERSION
    global GPL
    global args
    parser = argparse.ArgumentParser(
        description=GPL.split("\n")[1],
        epilog="Copyright (C) 2015  Mario Alviano ([email protected])")
    parser.add_argument('--help-syntax',
                        action='store_true',
                        help='print syntax description and exit')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='%(prog)s ' + VERSION,
                        help='print version number')
    parser.add_argument(
        '-g',
        '--grounder',
        metavar='<grounder>',
        type=str,
        help='path to the gringo 4.5 or higher (default \'gringo\')',
        default='gringo')
    parser.add_argument('-s',
                        '--solver',
                        metavar='<solver>',
                        type=str,
                        help='path to the SMT solver (default \'z3\')',
                        default='z3')
    parser.add_argument('--print-grounder-input',
                        action='store_true',
                        help='print the input of the grounder')
    parser.add_argument('--print-grounder-output',
                        action='store_true',
                        help='print the output of the grounder')
    parser.add_argument('--print-smt-input',
                        action='store_true',
                        help='print the input of the SMT solver')
    parser.add_argument('--print-smt-output',
                        action='store_true',
                        help='print the output of the SMT solver')
    parser.add_argument(
        '-o',
        '--optimize-definedness',
        metavar='<strategy>',
        help=
        'prefer more defined fuzzy answer sets; set optimization strategy: none (default), maximize, binary-search, progression, any',
        default='none')
    parser.add_argument('-p',
                        '--precision',
                        metavar='<epsilon>',
                        type=float,
                        help='precision required in definedness',
                        default=0.01)
    parser.add_argument('args',
                        metavar="...",
                        nargs=argparse.REMAINDER,
                        help="input files, and arguments for <grounder>")
    args = parser.parse_args()

    assert args.optimize_definedness in [
        'none', 'maximize', 'binary-search', 'progression', 'any'
    ]

    args.files = []
    args.grounder_args = []
    for arg in args.args:
        if os.path.isfile(arg) or arg == "/dev/stdin": args.files.append(arg)
        else: args.grounder_args.append(arg)
    if args.help_syntax: helpSyntax()
Code example #22
File: cancel_task_test.py Project: staticzeng/hermes
class CancelTaskTest(unittest.TestCase):
    def __init__(self, testname, ip, data):
        super(CancelTaskTest, self).__init__(testname)
        self.ct = CancelTask(ip, data)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testCancelTask(self):
        ip = self.ct.ip
        url = "http://%s:%s/cancel_task" % (ip, PORT)
        code, result = post(url, self.ct.data)
        print code, result
        self.assertEqual(result['ret'], 4)


if __name__ == "__main__":
    options, args = parser.parse_args()
    ip = options.ip if options.ip else HOST
    data = dict(
        task_id=options.taskid if options.taskid else default["task_id"])
    print "ip:", ip
    print "data:", data
    suite = unittest.TestSuite()
    suite.addTest(CancelTaskTest("testCancelTask", ip, data))
    unittest.TextTestRunner().run(suite)
Code example #23
    def parse(self):
        self.args = parser.parse_args()
        self.dispatch()
Code example #24
def main():
    with utils.TimerBlock("\nParsing Arguments") as block:
        args = parser.parse_args()

        args.rank = int(os.getenv('RANK', 0))

        block.log("Creating save directory: {}".format(args.save))
        args.save_root = os.path.join(args.save, args.name)
        if args.write_images or args.write_video:
            os.makedirs(args.save_root, exist_ok=True)
            assert os.path.exists(args.save_root)
        else:
            os.makedirs(args.save, exist_ok=True)
            assert os.path.exists(args.save)

        os.makedirs(args.torch_home, exist_ok=True)
        os.environ['TORCH_HOME'] = args.torch_home

        args.gpus = torch.cuda.device_count() if args.gpus < 0 else args.gpus
        block.log('Number of gpus: {} | {}'.format(args.gpus,
                                                   list(range(args.gpus))))

        args.network_class = utils.module_to_dict(models)[args.model]
        args.dataset_class = utils.module_to_dict(datasets)[args.dataset]
        block.log('save_root: {}'.format(args.save_root))
        block.log('val_file: {}'.format(args.val_file))

    with utils.TimerBlock("Building {} Dataset".format(args.dataset)) as block:
        vkwargs = {
            'batch_size': args.gpus * args.val_batch_size,
            'num_workers': args.gpus * args.workers,
            'pin_memory': True,
            'drop_last': True
        }
        step_size = args.val_step_size if args.val_step_size > 0 else (
            args.num_interp + 1)
        val_dataset = args.dataset_class(args=args,
                                         root=args.val_file,
                                         num_interp=args.num_interp,
                                         sample_rate=args.val_sample_rate,
                                         step_size=step_size)

        val_loader = torch.utils.data.DataLoader(val_dataset,
                                                 shuffle=False,
                                                 **vkwargs)

        args.folder_list = natsort.natsorted([
            os.path.basename(f)
            for f in sorted(glob(os.path.join(args.val_file, '*')))
        ])

        block.log('Number of Validation Images: {}:({} mini-batches)'.format(
            len(val_loader.dataset), len(val_loader)))

    with utils.TimerBlock("Building {} Model".format(args.model)) as block:
        model = args.network_class(args)

        block.log('Number of parameters: {val:,}'.format(val=sum([
            p.data.nelement() if p.requires_grad else 0
            for p in model.parameters()
        ])))

        block.log('Initializing CUDA')
        assert torch.cuda.is_available(
        ), 'Code supported for GPUs only at the moment'
        model = model.cuda()
        model = torch.nn.DataParallel(model, device_ids=list(range(args.gpus)))
        torch.manual_seed(args.seed)

        block.log("Attempting to Load checkpoint '{}'".format(args.resume))
        if args.resume and os.path.isfile(args.resume):
            checkpoint = torch.load(args.resume)

            # Partial initialization
            input_dict = checkpoint['state_dict']
            curr_dict = model.module.state_dict()
            state_dict = input_dict.copy()
            for key in input_dict:
                if key not in curr_dict:
                    continue
                if curr_dict[key].shape != input_dict[key].shape:
                    state_dict.pop(key)
                    print(
                        "key {} skipped because of size mismatch.".format(key))
            model.module.load_state_dict(state_dict, strict=False)

            epoch = checkpoint['epoch']
            block.log(
                "Successfully loaded checkpoint (at epoch {})".format(epoch))
        elif args.resume:
            block.log("No checkpoint found at '{}'.\nAborted.".format(
                args.resume))
            sys.exit(0)
        else:
            epoch = 0  # no checkpoint loaded; start from epoch 0 so evaluate() below has a value
            block.log("Random initialization, checkpoint not provided.")

    with utils.TimerBlock("Inference started ") as block:
        evaluate(args, val_loader, model, args.num_interp, epoch, block)