Example no. 1
import numpy as np
import time
import torch
from torch import nn
from torch import tensor
from torch import optim
import torch.nn.functional as F
from torch.autograd import Variable
from torchvision import datasets, transforms
import torchvision.models as models
import argparse
from collections import OrderedDict
import os
import copy

parser = argparse.ArgumentParser(description='Train Image Classifier')

parser.add_argument('data_directory')
parser.add_argument('--save_dir', action='store', default='.')
parser.add_argument('--arch', action='store', default='vgg19')
parser.add_argument('--learning_rate',
                    action='store',
                    default=0.01,
                    type=float)
parser.add_argument('--hidden_units', default=512, type=int)
parser.add_argument('--epochs', action='store', default=10, type=int)
parser.add_argument('--gpu', action='store_true', default=False)

args = parser.parse_args()

data_dir = args.data_directory
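
# A hypothetical invocation of this trainer (script and dataset names assumed,
# not part of the original excerpt):
#   python train.py flowers/ --arch vgg19 --learning_rate 0.01 --hidden_units 512 --epochs 10 --gpu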
Example no. 2
def parse_args_and_arch(parser, input_args=None, parse_known=False, suppress_defaults=False):
    if suppress_defaults:
        # Parse args without any default values. This requires us to parse
        # twice, once to identify all the necessary task/model args, and a second
        # time with all defaults set to None.
        args = parse_args_and_arch(
            parser,
            input_args=input_args,
            parse_known=parse_known,
            suppress_defaults=False,
        )
        suppressed_parser = argparse.ArgumentParser(add_help=False, parents=[parser])
        suppressed_parser.set_defaults(**{k: None for k, v in vars(args).items()})
        args = suppressed_parser.parse_args(input_args)
        return argparse.Namespace(**{
            k: v
            for k, v in vars(args).items()
            if v is not None
        })

    from fairseq.models import ARCH_MODEL_REGISTRY, ARCH_CONFIG_REGISTRY

    # The parser doesn't know about model/criterion/optimizer-specific args, so
    # we parse twice. First we parse the model/criterion/optimizer, then we
    # parse a second time after adding the *-specific arguments.
    # If input_args is given, we will parse those args instead of sys.argv.
    args, _ = parser.parse_known_args(input_args)

    # Add model-specific args to parser.
    if hasattr(args, 'arch'):
        model_specific_group = parser.add_argument_group(
            'Model-specific configuration',
            # Only include attributes which are explicitly given as command-line
            # arguments or which have default values.
            argument_default=argparse.SUPPRESS,
        )
        ARCH_MODEL_REGISTRY[args.arch].add_args(model_specific_group)

    # Add *-specific args to parser.
    from fairseq.registry import REGISTRIES
    for registry_name, REGISTRY in REGISTRIES.items():
        choice = getattr(args, registry_name, None)
        if choice is not None:
            cls = REGISTRY['registry'][choice]
            if hasattr(cls, 'add_args'):
                cls.add_args(parser)
    if hasattr(args, 'task'):
        from fairseq.tasks import TASK_REGISTRY
        TASK_REGISTRY[args.task].add_args(parser)
    if getattr(args, 'use_bmuf', False):
        # hack to support extra args for block distributed data parallelism
        from fairseq.optim.bmuf import FairseqBMUF
        FairseqBMUF.add_args(parser)

    # Parse a second time.
    if parse_known:
        args, extra = parser.parse_known_args(input_args)
    else:
        args = parser.parse_args(input_args)
        extra = None

    # Post-process args.
    if hasattr(args, 'max_sentences_valid') and args.max_sentences_valid is None:
        args.max_sentences_valid = args.max_sentences
    if hasattr(args, 'max_tokens_valid') and args.max_tokens_valid is None:
        args.max_tokens_valid = args.max_tokens
    if getattr(args, 'memory_efficient_fp16', False):
        args.fp16 = True

    # Apply architecture configuration.
    if hasattr(args, 'arch'):
        ARCH_CONFIG_REGISTRY[args.arch](args)

    if parse_known:
        return args, extra
    else:
        return args
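
# Hedged usage sketch: in fairseq this function is normally fed a parser built by
# fairseq.options (e.g. get_training_parser()); the first parse_known_args pass
# above only resolves the --arch/task/registry choices so that their specific
# flags can be registered before the final parse.
#   parser = options.get_training_parser()
#   args = parse_args_and_arch(parser)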

Example no. 3

import argparse

import utils  # assumed project-local module providing the download helpers


def download(args):
    """
    Function for downloading all examples in AudioSet containing labels for given classes
    :param args:
    :return:
    """
    print("Downloading classes from AudioSet.")

    for class_name in args.classes:
        utils.download(class_name, args)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('mode', type=str, choices=['find', 'download'])
    parser.add_argument('-c', '--classes', nargs='+', type=str,
                        help='list of classes to find in a given directory of audioset files')
    parser.add_argument('-b', '--blacklist', nargs='+', type=str,
                        help='list of classes which will exclude a clip from being downloaded')
    parser.add_argument('-d', '--destination_dir', type=str,
                        help='directory path to put downloaded (or found) files into')
    parser.add_argument('--audio_data_dir', type=str,
                        help='directory path containing pre-downloaded files from AudioSet')
    parser.add_argument('-fs', '--sample_rate', type=int, default=16000,
                        help='Sample rate of audio to download. Default 16kHz.')
    parser.add_argument('-s', '--strict', action='store_true',
                        help='If set, only match the exact string argument passed')
    parser.add_argument('--label_file', type=str,
                        help='Path to CSV file containing AudioSet labels for each class')
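
# Hypothetical invocation (the find/download dispatch is not shown in this
# excerpt, and the script name is assumed):
#   python audioset_download.py download -c "dog" "cat" -d ./clips -fs 16000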
Example no. 4
                     **{f'test_{k}': v for k, v in test_stats.items()},
                     'epoch': epoch,
                     'n_parameters': n_parameters}

        if args.output_dir and utils.is_main_process():
            with (output_dir / "log.txt").open("a") as f:
                f.write(json.dumps(log_stats) + "\n")

            # for evaluation logs
            if coco_evaluator is not None:
                (output_dir / 'eval').mkdir(exist_ok=True)
                if "bbox" in coco_evaluator.coco_eval:
                    filenames = ['latest.pth']
                    if epoch % 50 == 0:
                        filenames.append(f'{epoch:03}.pth')
                    for name in filenames:
                        torch.save(coco_evaluator.coco_eval["bbox"].eval,
                                   output_dir / "eval" / name)

    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    print('Training time {}'.format(total_time_str))


if __name__ == '__main__':
    parser = argparse.ArgumentParser('DETR training and evaluation script', parents=[get_args_parser()])
    args = parser.parse_args()
    if args.output_dir:
        Path(args.output_dir).mkdir(parents=True, exist_ok=True)
    main(args)
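
# This matches DETR's documented entry point; a typical run (paths assumed):
#   python main.py --coco_path /path/to/coco --output_dir ./output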
Example no. 5
import argparse

import cv2
import imutils
import numpy as np
from imutils import contours, perspective
from scipy.spatial import distance as dist


class PathFinder:
    def midpoint(ptA, ptB):
        return ((ptA[0] + ptB[0]) * 0.5, (ptA[1] + ptB[1]) * 0.5)

    def calculatePath(goal_pos):
        finish_x, finish_y = centres[goal_pos][:]
        print("finsih coords", finish_x, finish_y)

        if finish_x > feet_x:
            x_dist = finish_x - feet_x
        else:
            x_dist = feet_x - finish_x

        if finish_y > feet_y:
            y_dist = finish_y - feet_y
        else:
            y_dist = feet_y - finish_y
        print(x_dist)
        print(y_dist)

        euc_x = x_dist * scale
        euc_y = y_dist * scale

        return euc_x, euc_y

    # construct the argument parse and parse the arguments
    ap = argparse.ArgumentParser()
    ap.add_argument("-i",
                    "--image",
                    required=True,
                    help="path to the input image")
    ap.add_argument(
        "-w",
        "--width",
        type=float,
        required=True,
        help="width of the left-most object in the image (in inches)")
    args = vars(ap.parse_args())

    # load the image, convert it to grayscale, and blur it slightly
    image = cv2.imread(args["image"])
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (7, 7), 0)

    # perform edge detection, then perform a dilation + erosion to
    # close gaps in between object edges
    edged = cv2.Canny(gray, 50, 100)
    edged = cv2.dilate(edged, None, iterations=1)
    edged = cv2.erode(edged, None, iterations=1)

    dims = image.shape

    feet_y = image.shape[0]
    feet_x = image.shape[1] / 2

    print(feet_x, feet_y)

    centres = np.zeros((9, 2))
    mids = np.zeros((2, 1))

    # find contours in the edge map
    cnts = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL,
                            cv2.CHAIN_APPROX_SIMPLE)
    cnts = imutils.grab_contours(cnts)

    # sort the contours from left-to-right and, then initialize the
    # distance colors and reference object
    (cnts, _) = contours.sort_contours(cnts)
    colors = ((0, 0, 255), (240, 0, 159), (0, 165, 255), (255, 255, 0),
              (255, 0, 255))
    refObj = None

    # loop over the contours individually
    # to get the centre points of each of our grids
    # we then use these to work out which position is which
    count = 0
    for c in cnts:

        #print(cv2.contourArea(c))
        # if the contour is too large to be a grid cell, ignore it
        if cv2.contourArea(c) > 15000.0:
            continue

        # compute the rotated bounding box of the contour
        box = cv2.minAreaRect(c)
        box = cv2.cv.BoxPoints(box) if imutils.is_cv2() else cv2.boxPoints(box)
        box = np.array(box, dtype="int")

        # order the points in the contour such that they appear
        # in top-left, top-right, bottom-right, and bottom-left
        # order, then draw the outline of the rotated bounding
        # box
        box = perspective.order_points(box)

        # compute the center of the bounding box
        #print("box ", box[:,0], box[:,1])
        cX = np.average(box[:, 0])
        cY = np.average(box[:, 1])

        # draw the contours on the image
        orig = image.copy()
        cv2.drawContours(orig, [box.astype("int")], -1, (0, 255, 0), 2)
        #cv2.drawContours(orig, [refObj[0].astype("int")], -1, (0, 255, 0), 2)

        objCoords = np.vstack([box, (cX, cY)])
        midx, midy = objCoords[4, :]
        mids = midx, midy

        ##some formatting here, to get the ref obj distance etc.
        if count == 4:
            x = 300.0
            y = 212.5
            points = x, y
            centres[count, :] = points
            centres[count + 1, :] = mids
        elif count == 0:
            # we use the first reference box as our scale since we know its real size;
            # the remaining pixel distances can then be converted to inches

            centres[count, :] = mids
            (tl, tr, br, bl) = box
            marker = tr - tl
            # print (tl, tr, br, bl)
            # print (marker)
            # print box
            (tlblX, tlblY) = midpoint(tl, bl)
            (trbrX, trbrY) = midpoint(tr, br)
            D = dist.euclidean((tlblX, tlblY), (trbrX, trbrY))
            scale = args["width"] / D
            # refObj = (box, (feet_x, feet_y), D / args["width"])
            # print(refObj[2])
        elif count > 4:
            centres[count + 1, :] = mids
        else:
            centres[count, :] = mids

        color = (0, 0, 255)

        cv2.circle(orig, (int(feet_x), int(feet_y)), 5, color, -1)
        if count == 4:
            cv2.circle(orig, (int(centres[4][0]), int(centres[4][1])), 5,
                       color, -1)

        cv2.circle(orig, (int(midx), int(midy)), 5, color, -1)

        cv2.imshow("Image", orig)
        cv2.waitKey(0)
        count += 1
Example no. 6

def main():

    parser = argparse.ArgumentParser(description="usage: %prog [options]")

    parser.add_argument("--azureSovereignCloud",
                        dest="azureSovereignCloud",
                        default="public",
                        help="Azure Region [china|germany|public|usgov]")

    parser.add_argument("--tenantId",
                        dest="tenantId",
                        help="Tenant ID of the Azure subscription")

    parser.add_argument("--applicationId",
                        dest="applicationId",
                        help="Application ID of the Service Principal")

    parser.add_argument("--applicationSecret",
                        dest="applicationSecret",
                        help="Application Secret of the Service Principal")

    parser.add_argument("--username",
                        dest="username",
                        default="cc_admin",
                        help="The local admin user for the CycleCloud VM")

    parser.add_argument(
        "--hostname",
        dest="hostname",
        help=
        "The short public hostname assigned to this VM (or public IP), used for LetsEncrypt"
    )

    parser.add_argument("--acceptTerms",
                        dest="acceptTerms",
                        action="store_true",
                        help="Accept Cyclecloud terms and do a silent install")

    parser.add_argument(
        "--useLetsEncrypt",
        dest="useLetsEncrypt",
        action="store_true",
        help=
        "Automatically fetch certificate from Let's Encrypt.  (Only suitable for installations with public IP.)"
    )

    parser.add_argument(
        "--useManagedIdentity",
        dest="useManagedIdentity",
        action="store_true",
        help=
        "Use the first assigned Managed Identity rather than a Service Principle for the default account"
    )

    parser.add_argument("--dryrun",
                        dest="dryrun",
                        action="store_true",
                        help="Allow local testing outside Azure Docker")

    parser.add_argument("--password",
                        dest="password",
                        default="",
                        help="The password for the CycleCloud UI user")

    parser.add_argument("--publickey",
                        dest="publickey",
                        help="The public ssh key for the CycleCloud UI user")

    parser.add_argument(
        "--storageAccount",
        dest="storageAccount",
        help="The storage account to use as a CycleCloud locker")

    parser.add_argument(
        "--resourceGroup",
        dest="resourceGroup",
        help=
        "The resource group for CycleCloud cluster resources.  Resource Group must already exist.  (Default: same RG as CycleCloud)"
    )

    parser.add_argument(
        "--noDefaultAccount",
        dest="no_default_account",
        action="store_true",
        help=
        "Do not attempt to configure a default CycleCloud Account (useful for CycleClouds managing other subscriptions)"
    )

    parser.add_argument("--webServerMaxHeapSize",
                        dest="webServerMaxHeapSize",
                        default='4096M',
                        help="CycleCloud max heap")

    parser.add_argument("--webServerPort",
                        dest="webServerPort",
                        default=8080,
                        help="CycleCloud front-end HTTP port")

    parser.add_argument("--webServerSslPort",
                        dest="webServerSslPort",
                        default=8443,
                        help="CycleCloud front-end HTTPS port")

    parser.add_argument("--webServerClusterPort",
                        dest="webServerClusterPort",
                        default=9443,
                        help="CycleCloud cluster/back-end HTTPS port")

    parser.add_argument(
        "--webServerHostname",
        dest="webServerHostname",
        default="",
        help="Over-ride CycleCloud hostname for cluster/back-end connections")

    args = parser.parse_args()

    print("Debugging arguments: %s" % args)

    if not already_installed():
        configure_msft_repos()
        install_pre_req()
        download_install_cc()
        modify_cs_config(
            options={
                'webServerMaxHeapSize': args.webServerMaxHeapSize,
                'webServerPort': args.webServerPort,
                'webServerSslPort': args.webServerSslPort,
                'webServerClusterPort': args.webServerClusterPort,
                'webServerEnableHttps': True,
                'webServerHostname': args.webServerHostname
            })

    start_cc()

    install_cc_cli()

    if not args.dryrun:
        vm_metadata = get_vm_metadata()
    else:
        vm_metadata = {
            "compute": {
                "subscriptionId": "1234-50-679890",
                "location": "dryrun",
                "resourceGroupName": "dryrun-rg"
            }
        }

    if args.resourceGroup:
        print("CycleCloud created in resource group: %s" %
              vm_metadata["compute"]["resourceGroupName"])
        print("Cluster resources will be created in resource group: %s" %
              args.resourceGroup)
        vm_metadata["compute"]["resourceGroupName"] = args.resourceGroup

    cyclecloud_account_setup(vm_metadata, args.useManagedIdentity,
                             args.tenantId, args.applicationId,
                             args.applicationSecret, args.username,
                             args.azureSovereignCloud, args.acceptTerms,
                             args.password, args.storageAccount,
                             args.no_default_account, args.webServerSslPort)

    if args.useLetsEncrypt:
        letsEncrypt(args.hostname, vm_metadata["compute"]["location"])

    #  Create user requires root privileges
    # create_user_credential(args.username, args.publickey)

    clean_up()
Example no. 7
        else:
            for j in range(m):
                sens[j, :] = (contr_coeff**(2 * m) *
                              (sens[j, :] + sens_p[j, :]) + (2.0 * expan) *
                              (contr_coeff**(2 * (m - 1) - j)) *
                              np.sqrt(sens[j, :] + sens_p[j, :]) + (expan**2) *
                              (contr_coeff**(2 * (m - j - 1))))

    expo = alpha * (alpha - 1) * sens / sig_sq
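    # logsumexp(expo, axis=0) - log(m) == log(mean(exp(expo), axis=0)), computed stably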
    log_eta = logsumexp(expo, axis=0) - np.log(m)

    return w, log_eta


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='recursive mechanism')
    parser.add_argument('dname', help='dataset name')
    parser.add_argument('T', type=int, help='epoch')
    parser.add_argument('--data_dir', type=str, default=None)

    args = parser.parse_args()

    # load the dataset
    fpath = os.path.join(args.data_dir, f"{args.dname}.dat")
    X, y = load_dat(fpath, minmax=(0, 1), bias_term=True)
    # y[y < 0.5] = -1.0

    w, sen = sgd_recur(X,
                       y,
                       logistic_grad,
                       4000,
Example no. 8

def parse_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument("-e",
                        "--extension",
                        help="Type de fichier à traiter (CSV ou XML)")
    return parser.parse_args()
Example no. 9
# -*- coding: utf-8 -*-

import argparse
import logging
import sys

from battle_handlers import set_client_id
from battle_handlers.area_battle_handler import area_battle
from battle_handlers.extreme_battle_handler import extreme_battle
from battle_handlers.sublimation_battle_handler import sublimation_battle
from battle_handlers.summons_battle_handler import summons_battle
from battle_handlers.trade_battle_handler import buy_battle
from battle_handlers.trade_battle_handler import exhibit_battle

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='An auto battle script for game kakuriyo-no-mon.')
    parser.add_argument(
        'command',
        choices=['area', 'extreme', 'sublimation', 'summons', 'trade'],
        help='battle type: { area, extreme, sublimation, summons, trade }',
        metavar='command')
    parser.add_argument('-s',
                        '--set-client-id',
                        action='store_true',
                        help='set a new client id')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='increase output verbosity')
    parser.add_argument('--exhibit',
                        action='store_true',
Example no. 10
#!/usr/bin/python
import pickle
import matplotlib.pyplot as plt
import matplotlib.lines as lns
import matplotlib.ticker as mtick
from matplotlib.pyplot import cm
import numpy as np
import argparse
import os
plt.style.use('bmh')
parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    epilog="provide DT5000, 10000, 15000, 20000 and infinity in this order")
parser.add_argument("p1", help="path to 1st directory (DT=5000)")
parser.add_argument("p2", help="path to 2nd directory (DT=10000)")
parser.add_argument("p3", help="path to 3rd directory (DT=15000)")
parser.add_argument("p4", help="path to 4th directory (DT=20000)")
parser.add_argument("p5", help="path to 5th directory (DT=infinity)")
parser.add_argument("r", type=int, help="number of runs")
args = parser.parse_args()
pth1 = args.p1
pth2 = args.p2
pth3 = args.p3
pth4 = args.p4
pth5 = args.p5
RUNS = args.r

nX = ['n0/', 'n1/', 'n2/', 'n3/', 'n4/', 'n5/', 'n6/', 'n7/', 'n8/']
diz_pths = {pth1: [nX], pth2: [nX], pth3: [nX], pth4: [nX], pth5: [['n0/']]}
diz_labels = {pth1: r"$\Delta T=5.0\times 10^3$",
              pth2: r"$\Delta T=1.0\times 10^4$",
              pth3: r"$\Delta T=1.5\times 10^4$",
              pth4: r"$\Delta T=2.0\times 10^4$",
              pth5: r"$\Delta T=\infty$"}
for key in diz_pths:
Example no. 11
@app.errorhandler(401)
def send_error_unauthorized(error):
    return errors.unauthorized_response()


@app.errorhandler(404)
def send_error_not_found(error):
    return errors.not_found_response()


@app.errorhandler(405)
def send_error_method_not_allowed(error):
    return errors.method_not_allowed_response()


@app.errorhandler(500)
def send_error_internal_server_error(error):
    return errors.internal_server_error_response()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='model prediction')

    parser.add_argument('--params','-p', type=str, default='',
                        help='path to the file which stores network parameters.')
    args = parser.parse_args()

    classifier = NoduleClassifier(args)

    users.initialize()
    app.run(host="0.0.0.0", port=5000)
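    # NOTE: 0.0.0.0 binds on all network interfaces, exposing the service beyond localhost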
Example no. 12

#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import tabulate
from fontTools import ttLib
from gftools.constants import (NAMEID_COPYRIGHT_NOTICE, PLATID_STR)

parser = argparse.ArgumentParser(description='Print out copyright'
                                 ' nameIDs strings')
parser.add_argument('font', nargs="+")
parser.add_argument('--csv',
                    default=False,
                    action='store_true',
                    help="Output data in comma-separate-values"
                    " (CSV) file format")


def main():
    args = parser.parse_args()

    rows = []
    for font in args.font:
        ttfont = ttLib.TTFont(font)
        for name in ttfont['name'].names:
Example no. 13
        print(json)

        groupRefs = userJson["groupRefs"]
        if groupRefs is None:
            groupRefs = []

        groupRefs.append({"id": groupJson["id"], "name": groupJson["name"]})

        updatedUserJson = self.updateUser(userJson["id"],
                                          userJson["loginName"], groupRefs)
        print(updatedUserJson)
        return


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Have user join a group')
    parser.add_argument('-g', '--group', help='group name', default=None)
    parser.add_argument('-l',
                        '--login',
                        help='user login to authenticate',
                        default=None)
    parser.add_argument('-p',
                        '--password',
                        help='password to authenticate',
                        default=None)
    parser.add_argument(
        '-u',
        '--url',
        default=
        'http://ec2-54-200-9-5.us-west-2.compute.amazonaws.com:8080/message/rest',
        help='url of the message server (including /message/rest/)')
Example no. 14
import pickle
import cv2
import argparse

parser = argparse.ArgumentParser(description='''
This script generates Guided-GradCAM images.
Script adapted from:
# Author:   Kazuto Nakashima
# URL:      http://kazuto1011.github.io
# Created:  2017-05-26

        Directory Information
        ===================================
        Note that the outdir will automatically be: model_output/model_name/
        -mpath:         modelpath, including the model name, that you want to make the visualization for
        -imgpath:       image path to evaluate

        CNN Options
        ====================================
        -topk:          number of choices to visualize - i.e., the first choice, second choice, third choice
                        that the CNN would make as output by the softmax function
''')

parser.add_argument('-m', '--modelpath')
parser.add_argument('-state', '--imgpath')
parser.add_argument('-ii', '--topk', type=int)
Example no. 15
#!/usr/bin/env python
from stretch_body.hello_utils import *
import sys
import stretch_body.wrist_yaw as wrist_yaw
import argparse
import stretch_body.xbox_controller as xc

print_stretch_re_use()

parser=argparse.ArgumentParser(description='Jog the wrist_yaw joint from the keyboard')
args=parser.parse_args()

poses = {'zero':0, 'left':deg_to_rad(90), 'right': deg_to_rad(-45)}
w=wrist_yaw.WristYaw()
w.startup()

xbox_controller = xc.XboxController()
xbox_controller.start()

v_des=w.params['motion']['default']['vel']
a_des=w.params['motion']['default']['accel']
wrist_yaw_left = controller_state['left_shoulder_button_pressed']
wrist_yaw_right = controller_state['right_shoulder_button_pressed']
def menu_top():
    print('------ MENU -------')
    print('m: menu')
    print('controller left shoulder: increment 15 deg')
    print('controller right shoulder: decrement 15 deg')
    

def step_interaction():
Example no. 16
NOTE:
Channel overrides (a.k.a. "RC overrides") are highly discommended (they are primarily implemented
for simulating user input and when implementing certain types of joystick control).

They are provided for development purposes. Please raise an issue explaining why you need them
and we will try to find a better alternative: https://github.com/dronekit/dronekit-python/issues

Full documentation is provided at http://python.dronekit.io/examples/channel_overrides.html
"""
from __future__ import print_function
from dronekit import connect


#Set up option parsing to get connection string
import argparse  
parser = argparse.ArgumentParser(description='Example showing how to set and clear vehicle channel-override information.')
parser.add_argument('--connect', 
                   help="vehicle connection target string. If not specified, SITL automatically started and used.")
args = parser.parse_args()

connection_string = args.connect
sitl = None


#Start SITL if no connection string specified
if not connection_string:
    import dronekit_sitl
    sitl = dronekit_sitl.start_default()
    connection_string = sitl.connection_string()
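
# A typical next step in this dronekit example (not shown in this excerpt) would be:
#   vehicle = connect(connection_string, wait_ready=True)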

Example no. 17
def main():
    # Handle arguments
    parser = argparse.ArgumentParser(
        prog='Build',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=
        'This script builds and packs the selected mission framework.',
        epilog=
        'This build script is primarily built to pack the 7th Cavalry script package, cScripts.\nThe tool should be cross-platform and can be used for other packages as well.'
    )

    parser.add_argument('-b',
                        '--buildtype',
                        required=False,
                        choices=['release', 'dev', 'test', 'custom'],
                        default='test',
                        help="Add a additional tag to a to the build")
    parser.add_argument(
        "-p",
        "--public",
        help="Create a \"public\" build to be used on non CavPack Enviroment",
        required=False,
        action="store_true")

    parser.add_argument("-y",
                        "--fastbuild",
                        help="Will instantly run untill done.",
                        action="store_false")
    parser.add_argument(
        "-d",
        "--dontopenfolder",
        help="Don\'t open the release folder when the build is completed.",
        action="store_false")
    parser.add_argument("--deploy",
                        help="Deploy mode used by CI deployment.",
                        action="store_false")
    parser.add_argument("--color",
                        help="Enable colors in the script.",
                        action="store_true")

    parser.add_argument(
        '-v',
        '--version',
        action='version',
        version=
        'Author: Andreas Broström <*****@*****.**>\nScript version: {}'
        .format(__version__))

    args = parser.parse_args()

    # Construct public build
    public_file_paths = []
    public_operations = []

    if args.public:
        def split_config_list(raw):
            # Normalize a comma/newline-separated config value into a clean list.
            raw = raw.replace(' ', '').replace('\n', ',')
            return [x for x in raw.split(',') if x]

        public_file_paths.append(split_config_list(config['PATHS']['config_gear_files']))
        public_file_paths.append(split_config_list(config['PATHS']['script_gear_files']))
        public_file_paths.append(split_config_list(config['PATHS']['acearsenal_files']))

        # Replace Handler
        replace = split_config_list(config['PUBLIC BUILD OPERATIONS']['Replace_gear'])
        if len(replace) % 2 != 0:
            sys.exit(
                'Replace list has an uneven number of entries; you can\'t replace something with nothing. Use the remove operation for that.\nBuild aborted'
            )
        # group the flat list into [old, new] pairs
        replacesList = [replace[i:i + 2] for i in range(0, len(replace), 2)]
        public_operations.append(replacesList)

        # Remove Handler
        remove = split_config_list(config['PUBLIC BUILD OPERATIONS']['Remove_gear'])
        public_operations.append(list(remove))

        # Settings Change handler
        change_setting = config['PUBLIC BUILD OPERATIONS']['Change_settings']
        change_setting = change_setting.replace(', ', ',').replace('\n', ',')
        change_setting = [x for x in change_setting.split(',') if x]
        if len(change_setting) % 2 != 0:
            sys.exit(
                'Settings changes have an uneven number of entries; you can\'t change a setting into nothing.\nBuild aborted'
            )
        # group the flat list into [setting, value] pairs
        change_settings_list = [change_setting[i:i + 2] for i in range(0, len(change_setting), 2)]
        public_operations.append(change_settings_list)

        # Settings Add handler
        add_setting = config['PUBLIC BUILD OPERATIONS']['New_settings']
        add_setting = [x for x in add_setting.replace('\n', ',').split(',') if x]
        public_operations.append(list(add_setting))

    # build handler
    print(
        color_string(
            'Preparing a {} build for {}'.format(args.buildtype, script_name),
            '\033[1m', args.color))
    if not os.path.isdir(rootDir + '/.git'):
        print(
            "  {} This is not a git repository. This will result in no hash for branch name being present in the build."
            .format(color_string('Warning!', '\033[93m', args.color)))
    else:
        print("  Version: {}".format(
            get_script_version_number(version_file, 'str')))
        print("  Branch:  {}".format(get_git_branch_name()))
        print("  Hash:    {}".format(get_git_commit_hash(False)))
    print()

    objects = fetch_objects()
    list_objects(objects, args.color)

    # press enter to start build
    input('\nPress enter to start the build process...'
          ) if args.fastbuild else print('')

    # prep release

    if args.buildtype == 'release':
        if not get_git_branch_name() == 'master':
            if args.deploy:
                if args.fastbuild:
                    action = request_action(
                        'You are currently not on the master branch. Do you wish to check out master?'
                    )
                    if action:
                        try:
                            subprocess.check_output(
                                ['git', 'checkout', 'master'])
                        except subprocess.CalledProcessError:
                            action = request_action(
                                'Do you wish to continue anyway?')
                        if not action:
                            sys.exit()
                else:
                    try:
                        subprocess.check_output(['git', 'checkout', 'master'])
                    except subprocess.CalledProcessError:
                        print(
                            color_string(
                                'Warning: Checkout was aborted. You\'re still on branch {}...'
                                .format(get_git_branch_name()), '\033[91m',
                                args.color))

    name = set_package_name(script_name, args.buildtype)

    build_release(script_name, args.buildtype, args.public, public_file_paths,
                  public_operations, args.color)

    print('Build complete.')

    if os.name == 'nt' and args.dontopenfolder:
        os.system('explorer.exe {}\\release'.format(rootDir))
Example no. 18
#!/usr/bin/env python
import argparse
import copy
from atoms import *
from io_yaml import *
#*****************************************************************************************
str1 = "Extract configurations during MD or geometry optimization from VASP output (vasprun.xml)."
parser = argparse.ArgumentParser(description=str1)
parser.add_argument("-last",
                    action='store_false',
                    help="if present, only last configuration will be written")
parser.add_argument('fn_input',
                    action="store",
                    type=str,
                    help="Name of the input file : vasprun.xml")
parser.add_argument('fn_output',
                    action="store",
                    type=str,
                    help="Name of the output file in yaml format")
args = parser.parse_args()
lastconf = not args.last
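# i.e. args.last defaults to True and flips to False when -last is given,
# so lastconf is True only when -last was passed, matching its help text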

f = open(args.fn_input, "r")
atoms_all = []
ntypes = []
stypes = []
sat = []
iline_basis = -1
iline_atomtypes = -1
iline_positions = -1
iline_forces = -1
Example no. 19

        for size in self.sizes:
            try:
                card_layout = size(*self._args, **self._kwargs)
                card_layout.draw(canvas)
                break
            except TemplateTooSmall:
                pass
      

class MonsterCard(CardGenerator):
    sizes = [MonsterCardSmall, MonsterCardLarge]
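    # draw() (above) tries each layout in order and falls back to the next size
    # whenever TemplateTooSmall is raised, so the preferred (smaller) layout comes first.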


if __name__ == "__main__":
    
    parser = argparse.ArgumentParser(description="Generate D&D cards.")
    parser.add_argument("-t", "--type", help="What type of cards to generate",
                        action="store", default="monster", choices=["monster"],
                        dest="type")
    parser.add_argument("-o", "--out", help="Output file path",
                        action="store", default="cards.pdf", dest="output_path",
                        metavar="output_path")
    parser.add_argument("input", help="Path to input YAML file",
                        action="store")
    parser.add_argument("-f", "--fonts", help="What fonts to use when generating cards",
                        action="store", default="free", choices=["free", "accurate"],
                        dest="fonts")

    args = parser.parse_args()

    fonts = None
Example no. 20
def parse_command_line_arguments(logger):
    """
    Parse command line arguments received, if any
    Print example if invalid arguments are passed

    :param logger:  the logger
    :return:        config_filename passed as argument if any, else DEFAULT_CONFIG_FILENAME
                    export_formats passed as argument if any, else ['pdf']
                    list_preferences if passed as argument, else None
                    loop_enabled True if --loop passed as argument, else False
                    docker_enabled True if --docker passed as argument, else False
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', help='config file to use, defaults to ' + DEFAULT_CONFIG_FILENAME)
    parser.add_argument('--docker', nargs='*', help='Switches settings to ENV variables for use with docker.')
    parser.add_argument('--format', nargs='*', help='formats to download, valid options are pdf, json, docx, csv, '
                                                    'media, web-report-link, actions, actions-sql, sql, pickle, doc_creation')
    parser.add_argument('--list_preferences', nargs='*', help='display all preferences, or restrict to specific'
                                                              ' template_id if supplied as additional argument')
    parser.add_argument('--loop', nargs='*', help='execute continuously until interrupted')
    parser.add_argument('--setup', action='store_true', help='Automatically create a new directory containing the '
                                                             'necessary config file. '
                                                             'The directory will be named iAuditor Audit Exports and will '
                                                             'be placed in your current directory')
    args = parser.parse_args()
    if args.docker is None:
        if args.config is None:
            rename_config_sample(logger)

        if args.config is not None:
            config_filename = os.path.join('configs', args.config)
            if os.path.isfile(config_filename):
                logger.debug(config_filename + ' passed as config argument')
            else:
                logger.error(config_filename + ' is either missing or corrupt.')
                sys.exit(1)
        else:
            config_filename = os.path.join('configs', DEFAULT_CONFIG_FILENAME)
    else:
        config_filename = None

    if args.setup:
        initial_setup(logger)
        exit()

    export_formats = ['pdf']
    if args.format is not None and len(args.format) > 0:
        valid_export_formats = ['json', 'docx', 'pdf', 'csv', 'media', 'web-report-link', 'actions', 'actions-sql',
                                'sql', 'pickle', 'doc_creation']
        export_formats = []
        for option in args.format:
            if option not in valid_export_formats:
                print('{0} is not a valid export format.  Valid options are pdf, json, docx, csv, media, '
                      'web-report-link, actions, actions-sql, sql, pickle, or doc_creation'.format(option))
                logger.info('invalid export format argument: {0}'.format(option))
            else:
                export_formats.append(option)

    loop_enabled = args.loop is not None
    docker_enabled = args.docker is not None

    return config_filename, export_formats, args.list_preferences, loop_enabled, docker_enabled
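
# Hedged usage sketch (logger construction assumed):
#   logger = logging.getLogger(__name__)
#   (config_filename, export_formats, list_preferences,
#    loop_enabled, docker_enabled) = parse_command_line_arguments(logger)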
Example no. 21
# requires:
# - java (executed from the command line)
# - 

import sys
import os
import argparse

from filter_tiles import filter_tiles
from create_sift_features import create_sift_features
from match_sift_features import match_sift_features
from json_concat import json_concat
from optimize_montage_transform import optimize_montage_transform

# Command line parser
parser = argparse.ArgumentParser(description='A driver that does a 2D alignment of images.')
parser.add_argument('tiles_fname', metavar='tiles_json', type=str,
                    help='a tile_spec file that contains all the images to be aligned in json format')
parser.add_argument('-w', '--workspace_dir', type=str,
                    help='a directory where the output files of the different stages will be kept (default: current directory)',
                    default='.')
parser.add_argument('-r', '--render', action='store_true',
                    help='render final result')
parser.add_argument('-o', '--output_file_name', type=str,
                    help='the file that includes the output to be rendered in json format (default: output.json)',
                    default='output.json')
parser.add_argument('-j', '--jar_file', type=str,
                    help='the jar file that includes the render (default: ../target/render-0.0.1-SNAPSHOT.jar)',
                    default='../target/render-0.0.1-SNAPSHOT.jar')
# the default bounding box is as big as the image can be
parser.add_argument('-b', '--bounding_box', type=str,
Example no. 22
    # ax.plot(R,F)
    #
    # fig,ax = mpl.subplots(1,3,sharex=True,sharey=True,figsize=(20,8))
    # fig.subplots_adjust(wspace=0)
    # ax[0].imshow(-np.sqrt(nindx*nindx+nindy*nindy))
    # ax[1].imshow(-np.sqrt(indx*indx+(1/Parm.ba)**2*indy*indy))
    # ax[2].imshow(img)
    # ax[1].set_title("bouwens")
    return nimg


if __name__ == "__main__":


    parser = argparse.ArgumentParser(description = """Main script to fit galaxy
    morphology using MCMC. It takes a pre-formatted hdf5 file containing all
    data relevant to the fit. It is possible to run on individual subsets.""")
    parser.add_argument("configfile",metavar='NAME',type=str)
    parser.add_argument('-c','--clusters',default="",metavar='LIST',type=str)
    parser.add_argument('-i','--indices',default="",type=str,metavar='SET')
    parser.add_argument('--name',default="",type=str,metavar='ID')
    parser.add_argument('-n','--nrun',default=1000,type=int,metavar='NRUN')
    parser.add_argument('-e','--nexclude',default=900,type=int,metavar='NRUN')
    parser.add_argument('--nchain',default=160,type=int,metavar='NRUN')
    parser.add_argument("-P","--plot", action='store_true')
    parser.add_argument("-S","--show", action='store_true')
    parser.add_argument("-L","--nolensing", action="store_true")
    parser.add_argument("-N","--sersicfree", action="store_true")

    args = parser.parse_args()
Example no. 23
def get_args_parser():
    parser = argparse.ArgumentParser('Set transformer detector', add_help=False)
    parser.add_argument('--lr', default=1e-4, type=float)
    parser.add_argument('--lr_backbone', default=1e-5, type=float)
    parser.add_argument('--batch_size', default=8, type=int)
    parser.add_argument('--weight_decay', default=1e-4, type=float)
    parser.add_argument('--epochs', default=300, type=int)
    parser.add_argument('--lr_drop', default=200, type=int)
    parser.add_argument('--clip_max_norm', default=0.1, type=float,
                        help='gradient clipping max norm')

    # Model parameters
    parser.add_argument('--frozen_weights', type=str, default=None,
                        help="Path to the pretrained model. If set, only the mask head will be trained")
    # * Backbone
    parser.add_argument('--backbone', default='resnet50', type=str,
                        help="Name of the convolutional backbone to use")
    parser.add_argument('--dilation', action='store_true',
                        help="If true, we replace stride with dilation in the last convolutional block (DC5)")
    parser.add_argument('--position_embedding', default='sine', type=str, choices=('sine', 'learned'),
                        help="Type of positional embedding to use on top of the image features")

    # * Transformer
    parser.add_argument('--enc_layers', default=6, type=int,
                        help="Number of encoding layers in the transformer")
    parser.add_argument('--dec_layers', default=6, type=int,
                        help="Number of decoding layers in the transformer")
    parser.add_argument('--dim_feedforward', default=2048, type=int,
                        help="Intermediate size of the feedforward layers in the transformer blocks")
    parser.add_argument('--hidden_dim', default=256, type=int,
                        help="Size of the embeddings (dimension of the transformer)")
    parser.add_argument('--dropout', default=0.1, type=float,
                        help="Dropout applied in the transformer")
    parser.add_argument('--nheads', default=8, type=int,
                        help="Number of attention heads inside the transformer's attentions")
    parser.add_argument('--num_queries', default=100, type=int,
                        help="Number of query slots")
    parser.add_argument('--pre_norm', action='store_true')

    # * Segmentation
    parser.add_argument('--masks', action='store_true',
                        help="Train segmentation head if the flag is provided")

    # Loss
    parser.add_argument('--no_aux_loss', dest='aux_loss', action='store_false',
                        help="Disables auxiliary decoding losses (loss at each layer)")
    # * Matcher
    parser.add_argument('--set_cost_class', default=1, type=float,
                        help="Class coefficient in the matching cost")
    parser.add_argument('--set_cost_bbox', default=5, type=float,
                        help="L1 box coefficient in the matching cost")
    parser.add_argument('--set_cost_giou', default=2, type=float,
                        help="giou box coefficient in the matching cost")
    # * Loss coefficients
    parser.add_argument('--mask_loss_coef', default=1, type=float)
    parser.add_argument('--dice_loss_coef', default=1, type=float)
    parser.add_argument('--bbox_loss_coef', default=5, type=float)
    parser.add_argument('--giou_loss_coef', default=2, type=float)
    parser.add_argument('--eos_coef', default=0.1, type=float,
                        help="Relative classification weight of the no-object class")

    # dataset parameters
    parser.add_argument('--dataset_file', default='coco')
    parser.add_argument('--coco_path', type=str)
    parser.add_argument('--coco_panoptic_path', type=str)
    parser.add_argument('--remove_difficult', action='store_true')

    parser.add_argument('--output_dir', default='',
                        help='path where to save, empty for no saving')
    parser.add_argument('--device', default='cuda',
                        help='device to use for training / testing')
    parser.add_argument('--seed', default=42, type=int)
    parser.add_argument('--resume', default='', help='resume from checkpoint')
    parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
                        help='start epoch')
    parser.add_argument('--eval', action='store_true')
    parser.add_argument('--num_workers', default=2, type=int)

    # distributed training parameters
    parser.add_argument('--world_size', default=1, type=int,
                        help='number of distributed processes')
    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
    return parser
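
# get_args_parser() is consumed through argparse "parents", as in the DETR entry point:
#   parser = argparse.ArgumentParser('DETR training and evaluation script', parents=[get_args_parser()])
#   args = parser.parse_args()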
Example no. 24
def parse_arguments():
    parser = argparse.ArgumentParser(prog='ENCODE DCC ATAQC.',
                                     description='ATAQC')
    parser.add_argument('--paired-end',
                        action="store_true",
                        help='Paired-end BAM.')
    parser.add_argument('--bowtie2-log',
                        type=str,
                        help='Read bowtie2 log file (from task bowtie2).')
    parser.add_argument('--read-len-log',
                        type=str,
                        help='Read length log file (from task bowtie2).')
    parser.add_argument('--bam', type=str, help='Raw BAM file.')
    parser.add_argument(
        '--flagstat-log',
        type=str,
        help='Flagstat log file for Raw BAM (from task bowtie2).')
    parser.add_argument('--nodup-bam',
                        type=str,
                        help='Deduped BAM file (from task filter).')
    parser.add_argument(
        '--nodup-flagstat-log',
        type=str,
        help='Flagstat log file for deduped BAM file (from task filter).')
    parser.add_argument(
        '--pbc-log',
        type=str,
        help='PBC log file for deduped BAM file (from task filter).')
    parser.add_argument(
        '--dup-log',
        type=str,
        help='Dup log file for deduped BAM file (from task filter).')
    parser.add_argument('--mito-dup-log',
                        type=str,
                        help='Mito dup log file (from task filter).')
    parser.add_argument('--ta',
                        type=str,
                        help='TAG-ALIGN file (from task bam2ta).')
    parser.add_argument('--bigwig',
                        type=str,
                        help='BIGWIG file (from task macs2).')
    parser.add_argument('--peak',
                        type=str,
                        help='Raw NARROWPEAK file (from task macs2).')
    parser.add_argument(
        '--overlap-peak',
        type=str,
        help='Overlapping NARROWPEAK file (from task overlap).')
    parser.add_argument('--idr-peak',
                        type=str,
                        help='IDR NARROWPEAK file (from task idr).')
    parser.add_argument('--ref-fa', type=str, help='Reference fasta file.')
    parser.add_argument('--chrsz',
                        type=str,
                        help='2-col chromosome sizes file.')
    parser.add_argument('--tss', type=str, help='TSS definition bed file.')
    parser.add_argument('--dnase', type=str, help='DNase definition bed file.')
    parser.add_argument('--blacklist', type=str, help='Blacklist bed file.')
    parser.add_argument('--prom',
                        type=str,
                        help='Promoter definition bed file.')
    parser.add_argument('--enh',
                        type=str,
                        help='Enhancer definition bed file.')
    parser.add_argument('--reg2map', type=str, help='Reg2map file.')
    parser.add_argument('--reg2map-bed', type=str, help='Reg2map bed file.')
    parser.add_argument('--roadmap-meta',
                        type=str,
                        help='Roadmap metadata file.')
    parser.add_argument('--mito-chr-name',
                        default='chrM',
                        type=str,
                        help='Mito chromosome name.')
    parser.add_argument('--out-dir',
                        default='',
                        type=str,
                        help='Output directory.')
    parser.add_argument('--log-level',
                        default='INFO',
                        help='Log level',
                        choices=[
                            'NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR',
                            'CRITICAL'
                        ])
    args = parser.parse_args()
    log.setLevel(args.log_level)
    log.info(sys.argv)
    return args
Example no. 25
def main():
    # Parse arguments and pass through unrecognised args
    parser = argparse.ArgumentParser(
        add_help=False,
        usage='%(prog)s [test_runner.py options] [script options] [scripts]',
        description=__doc__,
        epilog='''
    Help text and arguments for individual test script:''',
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        '--ansi',
        action='store_true',
        default=sys.stdout.isatty(),
        help=
        "Use ANSI colors and dots in output (enabled by default when standard output is a TTY)"
    )
    parser.add_argument(
        '--combinedlogslen',
        '-c',
        type=int,
        default=0,
        metavar='n',
        help=
        'On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.'
    )
    parser.add_argument(
        '--coverage',
        action='store_true',
        help='generate a basic coverage report for the RPC interface')
    parser.add_argument(
        '--ci',
        action='store_true',
        help=
        'Run checks and code that are usually only enabled in a continuous integration environment'
    )
    parser.add_argument(
        '--exclude',
        '-x',
        help='specify a comma-separated-list of scripts to exclude.')
    parser.add_argument(
        '--extended',
        action='store_true',
        help='run the extended test suite in addition to the basic tests')
    parser.add_argument('--help',
                        '-h',
                        '-?',
                        action='store_true',
                        help='print help text and exit')
    parser.add_argument(
        '--jobs',
        '-j',
        type=int,
        default=4,
        help='how many test scripts to run in parallel. Default=4.')
    parser.add_argument(
        '--keepcache',
        '-k',
        action='store_true',
        help=
        'the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.'
    )
    parser.add_argument(
        '--quiet',
        '-q',
        action='store_true',
        help='only print dots, results summary and failure logs')
    parser.add_argument('--tmpdirprefix',
                        '-t',
                        default=tempfile.gettempdir(),
                        help="Root directory for datadirs")
    parser.add_argument('--failfast',
                        action='store_true',
                        help='stop execution after the first test failure')
    parser.add_argument('--filter',
                        help='filter scripts to run by regular expression')

    args, unknown_args = parser.parse_known_args()
    if not args.ansi:
        global BOLD, GREEN, RED, GREY
        BOLD = ("", "")
        GREEN = ("", "")
        RED = ("", "")
        GREY = ("", "")

    # args to be passed on always start with two dashes; tests are the remaining unknown args
    tests = [arg for arg in unknown_args if arg[:2] != "--"]
    passon_args = [arg for arg in unknown_args if arg[:2] == "--"]

    # Read config generated by configure.
    config = configparser.ConfigParser()
    configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
    config.read_file(open(configfile, encoding="utf8"))

    passon_args.append("--configfile=%s" % configfile)

    # Set up logging
    logging_level = logging.INFO if args.quiet else logging.DEBUG
    logging.basicConfig(format='%(message)s', level=logging_level)

    # Create base test directory
    tmpdir = "%s/test_runner_₿_🏃_%s" % (
        args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))

    os.makedirs(tmpdir)

    logging.debug("Temporary test directory at %s" % tmpdir)

    enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")

    if not enable_bitcoind:
        print("No functional tests to run.")
        print("Rerun ./configure with --with-daemon and then make")
        sys.exit(0)

    # Build list of tests
    test_list = []
    if tests:
        # Individual tests have been specified. Run specified tests that exist
        # in the ALL_SCRIPTS list. Accept names with or without a .py extension.
        # Specified tests can contain wildcards, but in that case the supplied
        # paths should be coherent, e.g. the same path as that provided to call
        # test_runner.py. Examples:
        #   `test/functional/test_runner.py test/functional/wallet*`
        #   `test/functional/test_runner.py ./test/functional/wallet*`
        #   `test_runner.py wallet*`
        #   but not:
        #   `test/functional/test_runner.py wallet*`
        # Multiple wildcards can be passed:
        #   `test_runner.py tool* mempool*`
        for test in tests:
            script = test.split("/")[-1]
            script = script + ".py" if ".py" not in script else script
            if script in ALL_SCRIPTS:
                test_list.append(script)
            else:
                print("{}WARNING!{} Test '{}' not found in full test list.".
                      format(BOLD[1], BOLD[0], test))
    elif args.extended:
        # Include extended tests
        test_list += ALL_SCRIPTS
    else:
        # Run base tests only
        test_list += BASE_SCRIPTS

    # Remove the test cases that the user has explicitly asked to exclude.
    if args.exclude:
        exclude_tests = [
            test.split('.py')[0] for test in args.exclude.split(',')
        ]
        for exclude_test in exclude_tests:
            # Remove <test_name>.py and <test_name>.py --arg from the test list
            exclude_list = [
                test for test in test_list
                if test.split('.py')[0] == exclude_test
            ]
            for exclude_item in exclude_list:
                test_list.remove(exclude_item)
            if not exclude_list:
                print("{}WARNING!{} Test '{}' not found in current test list.".
                      format(BOLD[1], BOLD[0], exclude_test))

    if args.filter:
        test_list = list(filter(re.compile(args.filter).search, test_list))

    if not test_list:
        print(
            "No valid test scripts specified. Check that your test is in one "
            "of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests"
        )
        sys.exit(0)

    if args.help:
        # Print help for test_runner.py, then print help of the first script (with args removed) and exit.
        parser.print_help()
        subprocess.check_call([
            sys.executable,
            os.path.join(config["environment"]["SRCDIR"], 'test', 'functional',
                         test_list[0].split()[0]), '-h'
        ])
        sys.exit(0)

    check_script_list(src_dir=config["environment"]["SRCDIR"],
                      fail_on_warn=args.ci)
    check_script_prefixes()

    if not args.keepcache:
        shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"],
                      ignore_errors=True)

    run_tests(
        test_list=test_list,
        src_dir=config["environment"]["SRCDIR"],
        build_dir=config["environment"]["BUILDDIR"],
        tmpdir=tmpdir,
        jobs=args.jobs,
        enable_coverage=args.coverage,
        args=passon_args,
        combined_logs_len=args.combinedlogslen,
        failfast=args.failfast,
        use_term_control=args.ansi,
    )
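A note on the pattern above: parser.parse_known_args() is what lets the runner separate its own flags from per-test arguments; anything it does not recognize is treated as either a script name or a pass-through option. A minimal, self-contained sketch of that split (the flag values are hypothetical, not the runner's real defaults):

import argparse

runner = argparse.ArgumentParser()
runner.add_argument('--jobs', type=int, default=4)
known, unknown = runner.parse_known_args(
    ['--jobs', '8', 'wallet_basic.py', '--loglevel=DEBUG'])
tests = [a for a in unknown if not a.startswith('--')]   # ['wallet_basic.py']
passon = [a for a in unknown if a.startswith('--')]      # ['--loglevel=DEBUG']

The --filter one-liner works for a similar reason: re.compile(args.filter).search returns a match object (truthy) or None, so it can be passed directly to filter() as the predicate.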
Example n. 26
    if args.test_root:
        approved = v.find_fast_tests(args.test_root, args.cost)
        approved.sort()
        v.generate_test_scripts(approved, os.path.abspath(args.test_root),
                                args.top, args.target)

    else:
        v.parse_qa_log(args.logfile)
        v.compare_all_failures()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=
        "Generate a NWChem test battery with serial and MPI execution scripts, or check the test results from a completed test battery.",
        epilog=
        "Example: generate a fast test battery where each test has cost no greater than 100:\n qacheck.py -c 100 -t /opt/science/nwchem/Nwchem-6.3.revision25564-src.2014-05-03/QA\nExample: run tests and then check them: \n cd /opt/science/nwchem/Nwchem-6.3.revision25564-src.2014-05-03/QA\n ./runserial | tee quick.log\n qacheck.py -l quick.log"
    )
    parser.add_argument(
        "-c",
        "--cost",
        help=
        "Maximum cost (wall clock time multiplied by number of processors) of tests to include in test battery.",
        type=int,
        default=1000)
    parser.add_argument(
        "--top",
        help="NWCHEM_TOP location of tree where NWChem was built/installed.",
        default="/opt/science/nwchem/current")
    parser.add_argument(
Example n. 27
                    int(float(_box.xmin)),
                    int(float(_box.ymin)),
                    int(float(_box.xmax)),
                    int(float(_box.ymax))
                ]
                print('{}\t{}\t{}\t{}\t{}'.format(iid, 'label:', label_name,
                                                  score, box))

            # draw bounding boxes on the image using labels
            #draw_boxes(image, boxes, config['model']['labels'], obj_thresh)

            # write the image with bounding boxes to file
            #cv2.imwrite(output_path + image_path.split('/')[-1], np.uint8(image))


if __name__ == '__main__':
    argparser = argparse.ArgumentParser(
        description='Predict with a trained yolo model')
    argparser.add_argument('-c', '--conf', help='path to configuration file')
    argparser.add_argument(
        '-i',
        '--input',
        help='path to an image, a directory of images, a video, or webcam')
    argparser.add_argument('-o',
                           '--output',
                           default='output/',
                           help='path to output directory')

    args = argparser.parse_args()
    _main_(args)
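The two commented-out lines above mark the intended output step. A hedged sketch of re-enabling it (assuming image is a NumPy array in the BGR channel order OpenCV expects; draw_boxes is the repository's own helper and is not reproduced here):

import os
import cv2
import numpy as np

def write_annotated(image, image_path, output_path):
    # Join the output directory with the input file name, then save as uint8.
    dest = os.path.join(output_path, os.path.basename(image_path))
    cv2.imwrite(dest, np.uint8(image))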
Example n. 28
                                            _to=sample.gid()),
                              emit_backref=True)
            for rf, value in line.items():
                if rf in [cancer_id, "Labels"]:
                    continue
                value = int(value)
                emitter.emit_edge(Sample_Split(
                    _from=sample.gid(),
                    _to=Split.make_gid("%s:%s" % (cancer_id, rf)),
                    type="testing" if value == 1 else "training"),
                                  emit_backref=True)

    emitter.close()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--input-matrix',
                        '-i',
                        type=str,
                        required=True,
                        help='CVfold matrix')
    parser.add_argument('--emitter-prefix',
                        '-p',
                        type=str,
                        required=True,
                        help='emitter prefix')
    args = parser.parse_args()
    transform_one(args.input_matrix, args.emitter_prefix)
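In the loop above, each matrix cell encodes fold membership: a value of 1 marks the sample as held out ("testing") for that repetition/fold column, and anything else as "training". The mapping in isolation (the column names here are hypothetical):

fold_row = {"R1:F1": 0, "R1:F2": 1}
for rf, value in fold_row.items():
    split_type = "testing" if int(value) == 1 else "training"
    print(rf, split_type)   # R1:F1 training, then R1:F2 testing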
            logging.info("Found %d objects for dataset %s" % (len(dataset_objs), prefix))
            results.setdefault(bucket, []).extend(dataset_objs)

    # print results per bucket
    for bucket in sorted(results):
        logging.info("Found %d osaka no-clobber errors for bucket %s" % (len(results[bucket]), bucket))

    # perform cleanup
    for bucket in sorted(results):
        # chunk
        chunks = [results[bucket][x:x+S3_MAX_DELETE_CHUNK] for x in xrange(0, len(results[bucket]), S3_MAX_DELETE_CHUNK)]
        
        for chunk in chunks:
            if force:
                del_obj = {"Objects": [{'Key': obj} for obj in chunk]}
                logging.info(json.dumps(del_obj, indent=2))
                client.delete_objects(Bucket=bucket, Delete=del_obj)
            else:
                logging.info("Running dry-run. These objects would've been deleted:")
                for obj in chunk: logging.info(obj)


if __name__ == "__main__":
    jobs_es_url = app.conf['JOBS_ES_URL']
    grq_es_url = app.conf['GRQ_ES_URL']
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-f', '--force', help="force deletion", action='store_true')
    args = parser.parse_args()

    clean(jobs_es_url, grq_es_url, args.force)
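The chunking above exists because S3's DeleteObjects API accepts at most 1000 keys per request (note the snippet is Python 2, hence xrange). A self-contained Python 3 sketch of the same pattern, with hypothetical bucket and key names:

import boto3

S3_MAX_DELETE_CHUNK = 1000  # S3's per-request limit; the original defines this constant elsewhere

def delete_keys(bucket, keys):
    client = boto3.client("s3")
    for i in range(0, len(keys), S3_MAX_DELETE_CHUNK):
        chunk = keys[i:i + S3_MAX_DELETE_CHUNK]
        # Each request deletes up to one chunk of keys in a single round trip.
        client.delete_objects(
            Bucket=bucket,
            Delete={"Objects": [{"Key": k} for k in chunk]})

# delete_keys("my-bucket", ["osaka/no-clobber/obj-%d" % i for i in range(2500)])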
Example n. 30
def main():
    # Init parser
    parser = argparse.ArgumentParser( prog = 'ClinVar Ingester', description = "Use -x and -c flags to parse XML and "\
                                     "CSV files, respectively.\nUse -s, -n and -v flags to write Significance, Name match "\
                                     " and variant_summary tables, respectively. Use -d flag to drop existing tables.\n"\
                                     "ex: mail.py -xsn" )
    # Arguments description
    parser.add_argument( "-x", help = "XML-file parsing flag", action='store_true' )
    parser.add_argument( "--xml-file", help = "Change path to XML file to import", default = XML_FILE )
    parser.add_argument( "-c", help = "CSV-file parsing flag", action='store_true' )
    parser.add_argument( "--csv-file", help = "Change path to CSV file to import", default = CSV_FILE )
    parser.add_argument( "-s", help = "Significance+ID+RCVAccession table write flag", action='store_true' )
    parser.add_argument( "--sig-table", help = "Change Significance+ID+RCVAccession table name", default = TABLE_SIG )
    parser.add_argument( "-n", help = "Submitter ID+Name table write flag", action='store_true' )
    parser.add_argument( "--name-table", help = "Change Submitter ID+Name table nane", default = TABLE_SUB )
    parser.add_argument( "-v", help = "variant_summary table write flag", action='store_true' )
    parser.add_argument( "--var_table", help = "Change variant_summary table name", default = TABLE_VAR )
    parser.add_argument( "-d", help = "Drop old tables, instead of rename", action='store_true' )
    parser.add_argument( "--log-file", "-l", help = "Change path to log file",
                        default = "{}_clinvar.log".format( time.strftime("%d_%m_%Y") ) )
    parser.add_argument( "--database", help = "Change Database/Namespace", default = DATABASE )
    parser.add_argument( "--port", type = int, help = "Change DBMS port", default =  PORT )
    parser.add_argument("--user", help = "Change DBMS user", default = USER )
    parser.add_argument("--password", help = "Change DBMS password", default = PASSWORD )
    # Parse arguments
    args = parser.parse_args( )
    # Init logging
    g_logger = logging.getLogger( 'clinvar' )
    g_handler = logging.FileHandler( args.log_file, mode='w', encoding='utf8' )
    g_formatter = logging.Formatter( '%(levelname)s@%(name)s:[%(asctime)s]>>> %(message)s' )
    g_handler.setFormatter( g_formatter )
    g_logger.addHandler( g_handler )
    g_logger.setLevel( logging.INFO )
    logger = logging.getLogger('clinvar.Main')
    # Greeting
    logger.info( "Hello." )
    logger.debug( args )
    if args.x and ( args.s or args.n ) :
        # Processing of XML file
        logger.info( "Start ingesting XML file {}.".format( args.xml_file ) )
        # Init and get batch generator
        batch_gen = (XML_File( args.xml_file, REFERENCE_PATHS, ASSERTION_PATHS, TABLES_L ).
                           get_batch( BATCH_SIZE ) )
        if args.s :
            # Init database significance table
            sig = Table( args.database, args.port, args.user, args.password,
                       args.sig_table, COLUMN_SIG, TYPE_SIG, INDEX_SIG, True, True, args.d )
        # Significance insertion, submitter accumulation
        sub_dict = { } # unique submitter dict
        ic = 0 # insert counter
        t = tb = time.time( ) # rate time, batch rate time
        for batch_list in batch_gen:
            # Dict and list accumulation
            insert_list = []
            for d in batch_list:
                if d[ 0 ][ 0 ] not in sub_dict:
                    sub_dict.update( { d[ 0 ][ 0 ]:d[ 0 ][ 1 ] } )
                elif sub_dict[ d[ 0 ][ 0 ] ] != d[ 0 ][ 1 ] :
                    if sub_dict[ d[ 0 ][ 0 ] ] is None :
                        sub_dict[ d[ 0 ][ 0 ] ] = d[ 0 ][ 1 ]
                    elif d[ 0 ][ 1 ] :
                        # Conflicting non-empty name for an already-seen submitter id
                        logger.debug( 'Name {} for {} known as {}.'.
                                        format( d[ 0 ][ 1 ], d[ 0 ][ 0 ], sub_dict[ d[ 0 ][ 0 ] ] )
                                    )
                insert_list += [ d[ 1 ] ]
            if args.s :
                # Data base insertion
                ic += sig.insert( insert_list )
                logger.debug( '{}; {}'.format( insert_list[ 0 ], len(insert_list) ) )
                logger.info( 'Significance = {}; Batch Rate = {};  General rate = {}.'.
                            format( ic, len( insert_list ) / ( time.time( ) - tb ), 
                                    ic / ( time.time( ) - t ) )
                        )
            tb = time.time( )
        if args.n :
            # Init database submitters table
            sub = Table( args.database, args.port, args.user, args.password,
                    args.name_table, COLUMN_SUB, TYPE_SUB, INDEX_SUB, True, True, args.d )
            # Submitter insertion
            sub_list = [ ] # unique submitter list
            ic = 0 # insert counter
            t = tb = time.time( ) # rate time, batch rate time
            for sid in sub_dict :
                sub_list += [ ( sid, sub_dict[ sid ] ) ]
                if len( sub_list ) >= BATCH_SIZE :
                    # Data base insertion
                    ic += sub.insert( sub_list ) 
                    logger.debug( sub_list[ 0 ] )
                    logger.info( 'Submitters = {}; Batch Rate = {};  General rate = {}.'.
                                format( ic, len( sub_list ) / ( time.time( ) - tb ),
                                    ic / ( time.time( ) - t ) )
                                )
                    del sub_list[ : ]
                    tb = time.time( )
            if sub_list :
                # Flush the final partial batch (guard against an empty buffer)
                ic += sub.insert( sub_list )
                logger.debug( sub_list[ 0 ] )
                logger.info( 'Submitters = {}; Batch Rate = {};  General rate = {}.'.
                            format( ic, len( sub_list ) / ( time.time( ) - tb ),
                                            ic / ( time.time( ) - t ) )
                            )
        logger.info( "Ingested {} in {}.".format( args.xml_file, time.time( ) - t ) )
    if args.c and args.v :
        # Processing of CSV file
        logger.info( "Start ingest CSV file {}.".format( args.csv_file ) )
        # Init and get batch generator
        batch_gen = CSV_File( args.csv_file ).get_batch( BATCH_SIZE )
        # Init database submissions table
        var = Table( args.database, args.port, args.user, args.password,
                       args.var_table, COLUMN_VAR, TYPE_VAR, INDEX_VAR, True, True, args.d )
        # Submission insertion
        ic = 0 # insert counter
        t = tb = time.time( ) # rate time, batch rate time
        for batch_list in batch_gen :
            # Data base insertion
            ic += var.insert( batch_list )
            logger.debug( batch_list[ 0 ] )
            logger.info( 'Submissions = {}; Batch Rate = {};  General rate = {}.'.
                            format( ic, len( batch_list ) / ( time.time( ) - tb ), 
                                    ic / ( time.time( ) - t ) )
                        )
            tb = time.time( )
        logger.info( "Ingested {} in {}.".format( args.csv_file, time.time( ) - t ) )
    # Farewell
    logger.info( "Bye bye." )