def main():
    """Command-line entry point: run one MTurk subcommand per label.

    Each label names an experiment; its config file is ``[label].config``
    (everything before the dot).  The chosen subcommand is applied to
    every label given on the command line.
    """
    parser = argparse.ArgumentParser(description='Interface with MTurk.')
    parser.add_argument("subcommand", choices=['posthit', 'getresults', 'reformat',
        "preparefiles", "anonymize"],
        type=str, action="store",
        help="choose a specific subcommand.")
    parser.add_argument("nameofexperimentfiles", metavar="label", type=str, nargs="+",
        help="you must have at least one label that corresponds to the " +
        "experiment you want to work with. each experiment has a unique label. " +
        "this will be the beginning of the name of the config file (everything " +
        "before the dot). [label].config.")

    args = parser.parse_args()

    # Attribute access instead of the indirect vars(args)[...] lookups.
    subcommand = args.subcommand
    labels = args.nameofexperimentfiles

    for label in labels:
        if subcommand == "posthit":
            prepare(label)
            posthit(label)
        elif subcommand == "getresults":
            getresults(label)
            anonymize(label + ".results")
            make_invoice(label)
        elif subcommand == "reformat":
            # Prefer the raw results file; fall back to the anonymized
            # copy, and warn (but keep going) when neither exists.
            try:
                try:
                    reformat(label + ".results")
                    make_invoice(label)
                except IOError:
                    reformat(label + "_anonymized.results")
            except IOError:
                print ("\nWARNING: cannot find file `" + label +
                ".results` or its anonymized version `" + label +
                "_anonymized.results`.\nSKIPPING!\n")
        elif subcommand == "preparefiles":
            prepare(label)
        elif subcommand == "anonymize":
            anonymize(label + ".results")
Example #2
0
 def __eq__(self, obj):
     """Equal iff *obj* has the same type and an identical attribute dict.

     Any failure while comparing (e.g. *obj* has no ``__dict__``) counts
     as "not equal" rather than raising.
     """
     try:
         return type(obj) == type(self) and vars(obj) == vars(self)
     except Exception:
         return False
Example #3
0
    def add_inferred_output_like(self, data_param, task_param, name):
        """Add an ``inferred`` section so the evaluation action can read
        the output of a previous inference run when inference is not
        explicitly specified.

        The new data section is cloned from the section that ``name``
        refers to in ``task_param``, with its ``csv_file`` redirected to
        the inference output directory.  In supervised learning the
        reference data section would often match the inference output and
        could be used here; otherwise a template data section can be used.

        :param data_param: dict of data section parameters (mutated in place)
        :param task_param: task parameter namespace (mutated in place)
        :param name: name of input parameter to copy parameters from
        :return: modified data_param and task_param
        """
        # Add the data parameter, cloned from the section `name` points at.
        # (A stray debug print(task_param) was removed here.)
        if 'inferred' not in data_param:
            data_name = vars(task_param)[name][0]
            inferred_param = Namespace(**vars(data_param[data_name]))
            inferred_param.csv_file = os.path.join(
                self.action_param.save_seg_dir, 'inferred.csv')
            data_param['inferred'] = inferred_param
        # Add the task parameter (only when absent or empty).
        if 'inferred' not in task_param or len(task_param.inferred) == 0:
            task_param.inferred = ('inferred',)
        return data_param, task_param
Example #4
0
    def function(self):
        """
        Build the operation handler with every decorator applied, in order:
        parameter mapping, optional response validation, produces handling,
        request validation, security (outermost) and optional uwsgi metrics.

        :rtype: types.FunctionType
        """
        wrapped = parameter_to_arg(self.parameters, self.__undecorated_function)

        if self.validate_responses:
            logger.debug('... Response validation enabled.')
            response_decorator = self.__response_validation_decorator
            logger.debug('... Adding response decorator (%r)', response_decorator)
            wrapped = response_decorator(wrapped)

        produces_decorator = self.__content_type_decorator
        logger.debug('... Adding produces decorator (%r)', produces_decorator, extra=vars(self))
        wrapped = produces_decorator(wrapped)

        for validation_decorator in self.__validation_decorators:
            wrapped = validation_decorator(wrapped)

        # NOTE: the security decorator should be applied last to check auth before anything else :-)
        security_decorator = self.security_decorator
        logger.debug('... Adding security decorator (%r)', security_decorator, extra=vars(self))
        wrapped = security_decorator(wrapped)

        if UWSGIMetricsCollector.is_available():  # pragma: no cover
            wrapped = UWSGIMetricsCollector(self.path, self.method)(wrapped)

        return wrapped
Example #5
0
def _install_handlers(cp, formatters):
    """Install and return handlers.

    ``cp`` is a configparser-like mapping: the ``[handlers]`` section's
    ``keys`` entry is a comma-separated list of handler names, and each
    ``[handler_<name>]`` section describes one handler (class, args,
    optional level/formatter, and an optional ``target`` for
    MemoryHandler chains).

    :param cp: parsed configuration mapping
    :param formatters: dict of formatter name -> Formatter instance
    :return: dict mapping handler name -> configured handler instance
    """
    hlist = cp["handlers"]["keys"]
    if not len(hlist):
        return {}
    hlist = hlist.split(",")
    hlist = _strip_spaces(hlist)
    handlers = {}
    fixups = [] #for inter-handler references
    for hand in hlist:
        section = cp["handler_%s" % hand]
        klass = section["class"]
        fmt = section.get("formatter", "")
        try:
            # Resolve the class name in the logging namespace first;
            # fall back to a full dotted-path import via _resolve().
            klass = eval(klass, vars(logging))
        except (AttributeError, NameError):
            klass = _resolve(klass)
        args = section["args"]
        # SECURITY NOTE: eval() on config-supplied text executes arbitrary
        # code -- the configuration file must be trusted.
        args = eval(args, vars(logging))
        h = klass(*args)
        if "level" in section:
            level = section["level"]
            # NOTE(review): logging._levelNames is Python 2-era internal
            # API (removed in Python 3) -- confirm the target version.
            h.setLevel(logging._levelNames[level])
        if len(fmt):
            h.setFormatter(formatters[fmt])
        if issubclass(klass, logging.handlers.MemoryHandler):
            target = section.get("target", "")
            if len(target): #the target handler may not be loaded yet, so keep for later...
                fixups.append((h, target))
        handlers[hand] = h
    #now all handlers are loaded, fixup inter-handler references...
    for h, t in fixups:
        h.setTarget(handlers[t])
    return handlers
def getScoreOfExample(rule, dataSetExample):
    """Score *dataSetExample* against *rule*.

    Each element of *rule* carries string expressions ``preCondition`` and
    ``condition`` plus two weights ``alpha1``/``alpha2``.  The example's
    feature values are exposed to ``eval()`` through an explicit namespace
    dict instead of the original trick of writing into ``vars()`` /
    ``locals()``, which CPython does not guarantee to propagate to the
    evaluated expressions (and which breaks under PEP 667 / Python 3.13).

    SECURITY NOTE: eval() executes arbitrary code from the rule strings --
    rules must come from a trusted source.

    :param rule: iterable of nodes with preCondition/condition/alpha1/alpha2
    :param dataSetExample: mapping of feature name -> raw value
    :return: accumulated score (0 when no preCondition holds)
    """
    # Build the evaluation namespace: SAME/DIFFERENT stay symbolic,
    # everything else is coerced to int (matching the original logic).
    env = {}
    for feature in featuresInDataSet:
        raw = dataSetExample[feature]
        env[feature] = raw if raw == SAME or raw == DIFFERENT else int(raw)

    score = 0
    for node in rule:
        # Module globals stay visible; feature values come from env.
        if eval(node.preCondition, globals(), env):
            if eval(node.condition, globals(), env):
                score += node.alpha1
            else:
                score += node.alpha2
    return score
Example #7
0
  def test_strategies_names_introspection(self):
    """Every strategy name (primary and extra) must be callable, visible
    as an attribute/key, and the primary name must become __name__."""
    sd = StrategyDict()
    sd.strategy("first", "abc")(lambda val: "abc" + val)
    sd.strategy("second", "def")(lambda val: "def" + val) # Neglect 2nd name
    sd.strategy("third", "123")(lambda val: "123" + val) # Neglect 2nd name

    # Nothing new here: strategies do what they should...
    assert sd("x") == "abcx"
    assert sd.default("p") == "abcp"

    assert sd["first"]("w") == "abcw" == sd.first("w")
    assert sd["second"]("zsc") == "defzsc" == sd.second("zsc")
    assert sd["third"]("blah") == "123blah" == sd.third("blah")

    assert sd["abc"]("y") == "abcy" == sd.abc("y")
    assert sd["def"]("few") == "deffew"
    assert sd["123"]("lots") == "123lots"

    # All names (and "default") are visible for introspection
    all_names = {"first", "second", "third", "abc", "def", "123"}
    for name in all_names | {"default"}:
        assert name in dir(sd)
        assert name in vars(sd)

    key_tuples = sd.keys()
    flat_keys = reduce(operator.concat, key_tuples)
    assert set(flat_keys) == all_names # Default not in keys
    assert set(key_tuples) == {("first", "abc"),
                               ("second", "def"),
                               ("third", "123")}

    # First name is the __name__
    assert sd["abc"].__name__ == "first"
    assert sd["def"].__name__ == "second"
    assert sd["123"].__name__ == "third"
Example #8
0
def getvarsfromobj(obj):
    """Return ``(obj, names)`` where *names* lists obj's numeric attributes.

    An attribute counts as numeric when its value is a `numbers.Number`
    or a numpy array with a numeric dtype (kind not in object/bytes/str/void).

    :param obj: any object with a ``__dict__``
    :return: tuple of the object itself and the list of attribute names
    """
    def is_num(value):
        ''' py3 replacement of operator.isNumberType.'''
        return isinstance(value, Number) or \
            (isinstance(value, np.ndarray) and value.dtype.kind not in 'OSUV')

    # Iterate items() once instead of re-looking each name up in vars(obj).
    return obj, [name for name, value in vars(obj).items() if is_num(value)]
    def getargs(self,moduleName,className,method) :
        '''
          Return a dict mapping each name in `method` to the list of its
          positional argument names, obtained via inspect.getargspec.

          moduleName -- dotted module path to search in
          className  -- class holding the methods, or None to look up
                        module-level functions instead
          method     -- list of method/function names to inspect
          Returns {name: [arg names]} on success, or False when a name
          cannot be found.  NOTE(review): mixed return types (dict or
          False) -- callers must test for False explicitly.
        '''
        print "Message : Argument list is being obtained for each method"
        methodArgsDict = {}
        if className == None:
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                # level=-1 selects Python 2's implicit relative import search
                Module = __import__(moduleList[len(moduleList) -1], globals(), locals(), [moduleList[len(moduleList) -2]], -1)
                try :
                    names = vars(Module)[name]
                except KeyError:
                    print "Message : method '" + name + "'does not exists,Continued with including it. "
                    return False
                # getargspec -> (args, varargs, keywords, defaults); [0] = arg names
                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]
        else :
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [className], -1)
                Class = getattr(Module, className)
                try :
                    # look only in the class's own dict, not inherited names
                    names = vars(Class)[name]
                except KeyError :
                    print "Message : method '" + name + "'does not exists,Continued with include it."
                    return False

                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]

        return methodArgsDict
Example #10
0
 def combine(self, *args, **kwargs):
     """Yield one struct per position, merging attributes across sources.

     Frames are taken from the sources in lockstep; attributes of later
     sources override those of earlier ones.
     """
     from ..frame.struct import struct
     # BUGFIX: the original zip(self._sources) produced 1-tuples, so the
     # merge loop below never executed; zip(*...) iterates in lockstep.
     for framegroup in zip(*self._sources):
         merged = struct(framegroup[0])
         for other in framegroup[1:]:
             vars(merged).update(vars(other))
         yield merged
  def test_good_configuration(self):
    """A well-formed dispatch.yaml parses into (ParsedURL, module) pairs
    and records the file's mtime.

    Uses mox record/replay: the getmtime and _parse_configuration calls
    below are recorded expectations, not real calls.
    """
    info = dispatchinfo.DispatchInfoExternal(
        application='appid',
        dispatch=[
            dispatchinfo.DispatchEntry(url='*/path', module='foo'),
            dispatchinfo.DispatchEntry(url='domain.com/path', module='bar'),
            dispatchinfo.DispatchEntry(url='*/path/*', module='baz'),
            dispatchinfo.DispatchEntry(url='*.domain.com/path/*', module='foo'),
            ])

    os.path.getmtime('/appdir/dispatch.yaml').AndReturn(123.456)
    application_configuration.DispatchConfiguration._parse_configuration(
        '/appdir/dispatch.yaml').AndReturn(info)

    self.mox.ReplayAll()
    config = application_configuration.DispatchConfiguration(
        '/appdir/dispatch.yaml')
    self.mox.VerifyAll()

    self.assertEqual(123.456, config._mtime)
    # Four entries go in but only two remain -- presumably
    # DispatchConfiguration filters out the entries with explicit hosts
    # ('domain.com/...', '*.domain.com/...'); confirm against its code.
    self.assertEqual(2, len(config.dispatch))
    self.assertEqual(vars(dispatchinfo.ParsedURL('*/path')),
                     vars(config.dispatch[0][0]))
    self.assertEqual('foo', config.dispatch[0][1])
    self.assertEqual(vars(dispatchinfo.ParsedURL('*/path/*')),
                     vars(config.dispatch[1][0]))
    self.assertEqual('baz', config.dispatch[1][1])
Example #12
0
File: base.py Project: keemy/django
    def run_from_argv(self, argv):
        """
        Set up any environment changes requested (e.g., Python path
        and Django settings), then run this command. If the
        command raises a ``CommandError``, intercept it and print it sensibly
        to stderr. If the ``--traceback`` option is present or the raised
        ``Exception`` is not ``CommandError``, raise it.
        """
        self._called_from_command_line = True
        # argv[0] is the program name, argv[1] the subcommand name.
        parser = self.create_parser(argv[0], argv[1])

        if self.use_argparse:
            options = parser.parse_args(argv[2:])
            cmd_options = vars(options)
            # Move positional args out of options to mimic legacy optparse
            args = cmd_options.pop('args', ())
        else:
            # Legacy optparse returns (options, args) directly.
            options, args = parser.parse_args(argv[2:])
            cmd_options = vars(options)
        handle_default_options(options)
        try:
            self.execute(*args, **cmd_options)
        except Exception as e:
            # With --traceback, or for any non-CommandError, propagate.
            if options.traceback or not isinstance(e, CommandError):
                raise

            # SystemCheckError takes care of its own formatting.
            if isinstance(e, SystemCheckError):
                self.stderr.write(str(e), lambda x: x)
            else:
                self.stderr.write('%s: %s' % (e.__class__.__name__, e))
            sys.exit(1)
        finally:
            # Always release DB connections, even when sys.exit() fires.
            connections.close_all()
 def __init__(self, dist, **kw):
     """
     Initialize the command for *dist*; every keyword parameter is
     copied onto the instance as an attribute.
     """
     _Command.__init__(self, dist)
     self.__dict__.update(kw)
Example #14
0
    def test_deleted_tracker(self, reactortime):
        """A garbage-collected tracker must cancel its pending alarms.

        Two alarms are scheduled, the tracker is forcibly torn down, and
        after advancing past the first alarm's deadline no delayed calls
        may remain and the callback must never fire.
        """
        tracker = MixedTracker(TreeRootNode, Node,
                reactor=reactortime)

        def callback(passed_tracker):  # pragma: no cover
            assert False, "shouldn't have been called"

        # anchor retains a reference to the deferred
        anchor_1 = tracker._set_alarm(callback=callback,
                delta=timedelta(seconds=30))
        anchor_2 = tracker._set_alarm(callback=callback,
                delta=timedelta(seconds=60))

        # break the reference loop before throwing it away
        root_anchor = tracker.root
        vars(root_anchor).clear()

        tracker.deserialize({"life": u"node: somenode"})

        # to force an error should it not detect it via weakref
        vars(tracker).clear()
        ref = weakref.ref(tracker)
        del tracker
        import gc; gc.collect()
        # the weakref must be dead: nothing may still hold the tracker
        assert not ref()

        reactortime.advance(31)
        assert not reactortime.getDelayedCalls()
Example #15
0
 def execute(self):
     """
     Run the calculator and aggregate the results.

     Builds one starmap per source model, attaches the generated
     rupture/event metadata to each submitted result, assigns each
     result its contiguous slice of realization indices, then saves
     everything to the datastore.

     :returns: the total number of events saved
     """
     num_rlzs = 0
     allres = []
     source_models = self.csm.info.source_models
     with self.monitor('sending riskinputs', autoflush=True):
         self.sm_by_grp = self.csm.info.get_sm_by_grp()
         self.eid = collections.Counter()  # sm_id -> event_id
         for i, args in enumerate(self.gen_args()):
             smap, attrs = self.build_starmap(*args)
             logging.info(
                 'Generated %d/%d ruptures/events for source model #%d',
                 sum(attrs['num_ruptures'].values()), attrs['num_events'],
                 attrs['sm_id'] + 1)
             res = smap.submit_all()
             # attach the starmap metadata to the result object
             vars(res).update(attrs)
             allres.append(res)
             # this source model's realizations occupy a contiguous slice
             res.rlz_slice = slice(num_rlzs, num_rlzs + res.num_rlzs)
             num_rlzs += res.num_rlzs
             for sg in source_models[i].src_groups:
                 sg.eff_ruptures = res.num_ruptures[sg.id]
     self.datastore['csm_info'] = self.csm.info
     num_events = self.save_results(allres, num_rlzs)
     self.save_data_transfer(parallel.IterResult.sum(allres))
     return num_events
Example #16
0
    def dict_attributes(self, element):
        """
        Type aware attributes to dictionary function.

        :param element: Python object to list attributes.
        :rtype: dict
        :return: A dict of the element attributes.
        """
        # Nothing to inspect.
        if element is None:
            return {}

        # Dictionaries are returned unchanged.
        if isinstance(element, dict):
            return element

        # Modules with an explicit __all__ expose only those names.
        if ismodule(element) and hasattr(element, '__all__'):
            public = element.__all__
            return {
                name: value for name, value in vars(element).items()
                if name in public
            }

        # Anything carrying a __dict__ converts directly.
        if hasattr(element, '__dict__'):
            return vars(element)

        return {}
Example #17
0
 def insert(self, objs, tableName):
     """Insert every object in *objs* into *tableName*.

     Column names are taken from the first object's attribute dict; each
     row is built in that same column order, so objects whose __dict__
     ordering differs still insert correctly.

     SECURITY NOTE: tableName and column names are interpolated into the
     SQL string -- they must not come from untrusted input.
     """
     cursor = self.conn.cursor()
     # Materialize the names: dict_keys views are not sequences, and we
     # need a fixed order to align every row's values (Python 3 fix).
     colnames = list(vars(objs[0]))
     qmarks = ['?' for x in range(len(colnames))]
     query = 'INSERT INTO ' + tableName + ' ("' + '","'.join(colnames) + '") VALUES (' + ', '.join(qmarks) + ')' 
     rows = (tuple(vars(obj)[c] for c in colnames) for obj in objs)
     cursor.executemany(query, rows)
     self.conn.commit()
Example #18
0
def post_import():
    """
    This is called after import or reload, to do further initialization
    of various modules.

    Wires up the renpy store aliases, exposes subprocess under the
    renpy namespace, and back-fills defaults into store and exports.
    """

    import renpy # @UnresolvedImport

    # Create the store.
    renpy.python.create_store("store")

    # Import the contents of renpy.defaultstore into renpy.store, and set
    # up an alias as we do.
    renpy.store = sys.modules['store']
    renpy.exports.store = renpy.store
    sys.modules['renpy.store'] = sys.modules['store']

    # Make subprocess importable as renpy.subprocess.
    import subprocess
    sys.modules['renpy.subprocess'] = subprocess

    # setdefault keeps any names the store has already customized.
    for k, v in renpy.defaultstore.__dict__.iteritems():
        renpy.store.__dict__.setdefault(k, v)

    # Import everything into renpy.exports, provided it isn't
    # already there.
    for k, v in globals().iteritems():
        vars(renpy.exports).setdefault(k, v)
Example #19
0
def _check_extension_attrs(cls):
    """Make sure only customizations that need a restriction tag generate one.

    Compares ``cls.Attributes`` against its parent's, then walks up the
    ``__extends__`` chain; returns the ancestor to restrict against, or
    the root ancestor when no tracked attribute differs.
    """

    extends = cls.__extends__

    eattrs = extends.Attributes
    cattrs = cls.Attributes

    # public attribute names on both sides of the comparison
    ckeys = set([k for k in vars(cls.Attributes) if not k.startswith('_')])
    ekeys = set([k for k in vars(extends.Attributes) if not k.startswith('_')])

    # get the attributes different from the parent class
    diff = set()
    for k in (ckeys | ekeys):
        if getattr(eattrs, k, None) != getattr(cattrs, k, None):
            diff.add(k)

    # compare them with what comes from ATTR_NAMES
    attr_names = ATTR_NAMES[cls]
    retval = None
    while extends is not None:
        retval = extends
        # NOTE(review): diff and attr_names never change inside this loop,
        # so the first iteration decides; the walk only selects the root
        # ancestor as a fallback. Confirm this is intentional.
        if len(diff & attr_names) > 0:
            return extends
        extends = extends.__extends__

    return retval
Example #20
0
    def get_passed_tests(self):
        """Return a Result object for every test that passed.

        Passed = all collected tests minus failures, skips, errors and
        tests whose class-level setup errored (reported by unittest as
        ``_ErrorHolder`` entries in self.errors).
        """
        all_tests = []
        actual_number_of_tests_run = []
        failed_tests = []
        skipped_tests = []
        errored_tests = []
        setup_errored_classes = []
        setup_errored_tests = []
        passed_obj_list = []
        # unittest suite internals: each _tests entry wraps a single test
        for item in vars(self.master_testsuite).get('_tests'):
            all_tests.append(vars(item).get('_tests')[0])
        for failed_test in self.failures:
            failed_tests.append(failed_test[0])
        for skipped_test in self.skipped:
            skipped_tests.append(skipped_test[0])
        for errored_test in self.errors:
            # _ErrorHolder entries represent class-level setup failures,
            # rendered like "setUpClass (pkg.module.ClassName)"
            if errored_test[0].__class__.__name__ != '_ErrorHolder':
                errored_tests.append(errored_test[0])
            else:
                setup_errored_classes.append(
                    str(errored_test[0]).split(".")[-1].rstrip(')'))
        if len(setup_errored_classes) != 0:
            # every test of a setup-errored class counts as not run
            for item_1 in all_tests:
                for item_2 in setup_errored_classes:
                    if item_2 == item_1.__class__.__name__:
                        setup_errored_tests.append(item_1)
        else:
            actual_number_of_tests_run = all_tests

        # set arithmetic: whatever is left over passed
        for passed_test in list(set(all_tests) - set(failed_tests) - set(skipped_tests) - set(errored_tests) - set(setup_errored_tests)):
            passed_obj = Result(passed_test.__class__.__name__,
                                vars(passed_test).get('_testMethodName'))
            passed_obj_list.append(passed_obj)

        return passed_obj_list
Example #21
0
    def backup_module(self, mod):
        """
        Makes a backup of `mod`, which must be a Python module.

        Records the set of variable names in the module and, for each
        eligible variable, the identity of its current value (keeping the
        object alive so the id stays valid) for later restoration.
        """

        name = mod.__name__

        # only renpy's own modules are backed up
        if not name.startswith("renpy"):
            return

        if name in backup_blacklist:
            return

        # style data is regenerated, never restored
        if name.startswith("renpy.styledata"):
            return

        self.names[mod] = set(vars(mod).keys())

        for k, v in vars(mod).iteritems():

            # skip dunders (__name__, __file__, ...)
            if k.startswith("__") and k.endswith("__"):
                continue

            if isinstance(v, type_blacklist):
                continue

            if name + "." + k in name_blacklist:
                continue

            idv = id(v)

            # (module, name) -> object id; keep the object referenced so
            # its id is not reused by the allocator
            self.variables[mod, k] = idv
            self.objects[idv] = v
Example #22
0
def assert_close(a, b, rtol=1e-07, atol=0, context=None):
    """
    Compare for equality up to a given precision two composite objects
    which may contain floats. NB: if the objects are or contain generators,
    they are exhausted.

    :param a: an object
    :param b: another object
    :param rtol: relative tolerance
    :param atol: absolute tolerance
    :param context: object shown in the error message on failure
    :raises AssertionError: if the two objects differ beyond the tolerance
    """
    if isinstance(a, float) or isinstance(a, numpy.ndarray) and a.shape:
        # shortcut
        numpy.testing.assert_allclose(a, b, rtol, atol)
        return
    if a == b:  # another shortcut
        return
    if hasattr(a, '__slots__'):  # record-like objects
        assert_close_seq(a.__slots__, b.__slots__, rtol, atol, a)
        for x, y in zip(a.__slots__, b.__slots__):
            assert_close(getattr(a, x), getattr(b, y), rtol, atol, x)
        return
    # collections.Mapping was removed in Python 3.10; use collections.abc
    if isinstance(a, collections.abc.Mapping):  # dict-like objects
        assert_close_seq(a.keys(), b.keys(), rtol, atol, a)
        assert_close_seq(a.values(), b.values(), rtol, atol, a)
        return
    if hasattr(a, '__iter__'):  # iterable objects
        assert_close_seq(list(a), list(b), rtol, atol, a)
        return
    if hasattr(a, '__dict__'):  # objects with an attribute dictionary
        # NOTE(review): rtol/atol are not propagated here (defaults are
        # used for the recursive compare) -- confirm this is intentional.
        assert_close(vars(a), vars(b), context=a)
        return
    ctx = '' if context is None else 'in context ' + repr(context)
    raise AssertionError('%r != %r %s' % (a, b, ctx))
Example #23
0
    def __content_type_decorator(self):
        """
        Get produces decorator.

        If the operation mimetype format is json then the function return value is jsonified

        From Swagger Specification:

        **Produces**

        A list of MIME types the operation can produce. This overrides the produces definition at the Swagger Object.
        An empty value MAY be used to clear the global definition.

        :rtype: types.FunctionType
        """
        logger.debug('... Produces: %s', self.produces, extra=vars(self))

        mimetype = self.get_mimetype()

        if all_json(self.produces):  # endpoint will return json
            logger.debug('... Produces json', extra=vars(self))
            # TODO: Refactor this.
            return lambda f: f

        if len(self.produces) == 1:
            logger.debug('... Produces %s', mimetype, extra=vars(self))
            return Produces(mimetype)

        return BaseSerializer()
Example #24
0
def LogArguments(Log=Logger,
                 ArgParser=argparse.ArgumentParser,
                 Arguments=argparse.Namespace):
#   Argument logging module:
#   prints the parser help plus every parsed argument (keys left-aligned
#   to the longest key) both to stdout and to the log file handle.
    ArgParser.print_help()
    ArgParser.print_help(Log.GetFileHandle())

#   Calculate max. of keylength for formatting
    MaxLen = 0
    for Key in vars(Arguments).iterkeys():
        MaxLen = max(MaxLen,len(Key))
    LogString =  '\n****\n'+\
                 'Used arguments:\n'+\
                 '---------------'
    print LogString
    Log.Write(LogString+'\n')

#   Build one "key: value" line per argument, aligned to MaxLen
    FormatString = '{0:<'+str(MaxLen)+'}'
    LogString    = ''
    for Key,Value in vars(Arguments).iteritems():
        LogString += FormatString.format(Key)+': '+str(Value)+'\n'
    LogString += '****\n'
    print LogString
    Log.Write(LogString+'\n')
    return
Example #25
0
def new_openscad_class_str( class_name, args=None, kwargs=None, include_file_path=None, use_not_include=True):
    """Generate Python source for a wrapper class around an OpenSCAD object.

    :param class_name: name of the generated class (and SCAD callable)
    :param args: names of required SCAD arguments
    :param kwargs: names of optional SCAD arguments; their default values
        are defined on the SCAD side, so they are emitted as ``=None``
    :param include_file_path: when given, the class wraps
        ``included_openscad_object`` and records this path
    :param use_not_include: forwarded to included_openscad_object
    :return: the class definition as a string

    The original version interpolated with ``% vars()``, which silently
    depends on local frame contents and used mutable default arguments;
    both replaced with explicit formatting and ``None`` defaults.
    """
    args = args if args is not None else []
    kwargs = kwargs if kwargs is not None else []

    args_str = ''
    args_pairs = ''

    for arg in args:
        args_str += ', ' + arg
        args_pairs += "'%s':%s, " % (arg, arg)

    # kwargs have a default value defined in their SCAD versions.  We don't
    # care what that default value will be (SCAD will take care of that), just
    # that one is defined.
    for kwarg in kwargs:
        args_str += ', %s=None' % kwarg
        args_pairs += "'%s':%s, " % (kwarg, kwarg)

    if include_file_path:
        # NOTE the explicit import of 'solid' below. This is a fix for:
        # https://github.com/SolidCode/SolidPython/issues/20 -ETJ 16 Jan 2014
        result = ("import solid\n"
        "class %s( solid.included_openscad_object):\n"
        "   def __init__(self%s, **kwargs):\n"
        "       solid.included_openscad_object.__init__(self, '%s', {%s }, include_file_path='%s', use_not_include=%s, **kwargs )\n"
        "   \n"
        "\n" % (class_name, args_str, class_name, args_pairs,
                include_file_path, use_not_include))
    else:
        result = ("class %s( openscad_object):\n"
        "   def __init__(self%s):\n"
        "       openscad_object.__init__(self, '%s', {%s })\n"
        "   \n"
        "\n" % (class_name, args_str, class_name, args_pairs))

    return result
def main():
	# Generates C source (crState<Name>Diff / crState<Name>Switch) for the
	# Chromium state tracker from a state_<name>.txt description file.
	# Usage: script <name> <Name> (lowercase and capitalized state names).
	name = sys.argv[1]
	Name = sys.argv[2]

	print """/* This code is AUTOGENERATED!!! */

#include "state.h"
#include "state_internals.h\""""

	# Function headers are emitted verbatim; gendiffcode() fills the bodies.
	print """
void crState%(Name)sDiff(CR%(Name)sBits *b, CRbitvalue *bitID,
		CRContext *fromCtx, CRContext *toCtx)
{
	CR%(Name)sState *from = &(fromCtx->%(name)s);
	CR%(Name)sState *to = &(toCtx->%(name)s);"""%vars()
	gendiffcode("state_%s.txt"%(name.lower()), name, docopy=1, doinvalid=0)
	print """}

void crState%(Name)sSwitch(CR%(Name)sBits *b, CRbitvalue *bitID,
		CRContext *fromCtx, CRContext *toCtx)
{
	CR%(Name)sState *from = &(fromCtx->%(name)s);
	CR%(Name)sState *to = &(toCtx->%(name)s);"""%vars()
	gendiffcode("state_%s.txt"%(Name.lower()), Name, docopy=0, doinvalid=1)
	print "}\n"
Example #27
0
def _load_backend(_backend, _platform):
    """load a process backend

    Imports the platform-specific submodule, validates it against
    IKittProcModule, copies its names into this module's globals() and
    records the exported public names in _EXPORTED.
    """
    name = None
    _has_all = False
    try:
        name = __name__ + '.' + _platform
        mod = __import__(name, {}, {}, [_platform])
        #validate the module interface
        if not IKittProcModule.providedBy(mod):
            e = ImportError("%s does not implement IKittProcModule interface" \
                    % name)
            raise e #throw exception as an import error
        # when the module declares __all__, export exactly those names
        if '__all__' in vars(mod):
            _EXPORTED.update(set(mod.__all__))
            _has_all = True
        for var, obj in vars(mod).items():
            if hasattr(obj, '__class__') and var in _process_interfaces:
                #demonstrate you at least read the implementation
                if not _process_interfaces[var].implementedBy(obj):
                    e = 'Class [%s] from %s does not match interface' % \
                            (var,name) 
                    warnings.warn(e)
                    continue
            #update our namespace
            globals().update({var: obj})
            if _has_all: continue
            #add to exported interfaces
            if not var.startswith('_'):
                _EXPORTED.add(var)
    except ImportError: raise
    except AssertionError: pass
    # NOTE(review): bare except reduces every other failure to a printed
    # traceback -- apparently deliberate best-effort loading; confirm.
    except: traceback.print_exc()
Example #28
0
    def __init__(self, mapping, params=(), predict_proba=False, **kwargs):
        """
        :param mapping:
          A list of ``(name, type)`` pairs for the query parameters that
          should be included in the request.  They are processed in
          :meth:`sample_from_request` to build the sample used for
          prediction, e.g. two string request parameters ``pos`` and
          ``neg``::

            { ...
              'mapping': [('pos', 'str'), ('neg', 'str')]
            ... }

        :param params:
          Like *mapping*, a list of name/type pairs, but these are passed
          to the model's :meth:`~palladium.interfaces.Model.predict`
          method as keyword arguments.

        :param predict_proba:
          When true, the result contains a list of class probabilities
          instead of a single class (the default).
        """
        self.mapping = mapping
        self.params = params
        self.predict_proba = predict_proba
        # Any further keyword arguments become instance attributes.
        self.__dict__.update(kwargs)
Example #29
0
def simplifyTree(tree):
    """Convert each top-level AST child of *tree* into a simplified dict.

    Position attributes (lineno / col_offset) are stripped from each node
    in place; every remaining field is reduced via the matching
    simplify* helper keyed on its AST type.

    :param tree: a parsed ``ast`` tree (e.g. from ``ast.parse``)
    :return: list with one dict per direct child node
    """
    resultList = []
    for node in list(ast.iter_child_nodes(tree)):
        simplenode = {}
        # drop position info so structurally equal nodes compare equal
        vars(node).pop('lineno')
        vars(node).pop('col_offset')
        for key, value in vars(node).items():
            if isinstance(value, ast.Num):
                simplenode[key] = simplifyNum(value)
            elif isinstance(value, ast.Name):
                # NOTE(review): the result is discarded here, unlike the
                # other simplify* branches -- confirm this is intentional.
                simplifyName(value)
            elif isinstance(value, ast.Str):
                simplenode[key] = simplifyStr(value)
            # BUGFIX: was `key is 'targets'` -- identity comparison with a
            # string literal relies on interning; use equality.
            elif key == 'targets':
                simplenode[key] = simplifyName(value[0])
            elif isinstance(value, ast.Call):
                simplenode[key] = simplifyCall(value)
            elif isinstance(value, ast.List):
                simplenode[key] = simplifyList(value)
            elif isinstance(value, ast.Dict):
                simplifyDict(value)
            else:
                print(type(value))
        resultList.append(simplenode)
    return resultList
Example #30
0
def post_exec_hook(hook):
    """
    Runs a hook function defined in a deploy.py file

    Lookup order: the project's own deploy module (a missing one aborts
    the whole run), then each installed app's deploy module, and finally
    woven.deploy.  A given function object runs at most once.
    """
    executed = []

    # Project-level deploy module first; if it can't be imported (or the
    # hook itself raises ImportError) stop entirely, as before.
    project_module = ".".join([env.project_package_name, "deploy"])
    try:
        deploy_mod = import_module(project_module)
        func = vars(deploy_mod).get(hook)
        if func:
            func()
            executed.append(func)
    except ImportError:
        return

    # Then once per installed app, skipping woven (handled last).
    for app in env.INSTALLED_APPS:
        if app == "woven":
            continue
        try:
            app_mod = import_module(".".join([app, "deploy"]))
            func = vars(app_mod).get(hook)
            if func and func not in executed:
                func()
                executed.append(func)
        except ImportError:
            pass

    # Finally woven's own deploy hooks.
    import woven.deploy
    func = vars(woven.deploy).get(hook)
    if func and func not in executed:
        func()
def extractAssignmentData( assignments, verbose = False ):
    """Flatten MTurk assignment answers into a list of per-assignment dicts.

    Each assignment's answers (QuestionFormAnswer-like objects with .qid and
    .fields) are matched by question id; known keys are coerced to their
    natural types.  An unknown key aborts the extraction.

    Parameters:
        assignments: iterable of assignment objects, each with .AssignmentId
            and .answers[0] holding the answer objects.
        verbose: when True, echo every parsed field to stdout.

    Returns:
        A list of dicts (one per assignment), or None on an unknown key.
    """
    all_assignments_data = []

    # Per-field defaults.
    # NOTE(review): these are initialised once, outside the loop, so a field
    # missing from one assignment silently inherits the previous assignment's
    # value — preserved as-is, but looks unintended; confirm.
    _worker_id = ''
    _worker_exp = 0
    _hit_id = 0
    _assignment_id = ''
    _gui_rating = ''
    _hit_comment = ''
    _hit_rt = 0
    _hit_it = 0
    _trials_results = ''
    _hit_depth_str  = ''
    _hit_reject_flag = False
    _hit_flag = False

    for ass in assignments:
        if verbose:
            # print(x) with a single argument behaves identically under
            # Python 2 and 3, unlike the original bare print statements.
            print("====================================================")
            print("Content of Assignment: [%s]" % ass.AssignmentId)
            print("====================================================")
        for question_form_answer in ass.answers[0]:
            key = question_form_answer.qid
            value = question_form_answer.fields

            if key == '_hit_id':
                _hit_id = int(value[0])
                if verbose:
                    print(" - HIT ID: [%d]" % (_hit_id))
            elif key == '_assignment_id':
                _assignment_id = value[0]
                if verbose:
                    print(" - Assignment ID: [%s]" % (_assignment_id))
            elif key == '_worker_id':
                _worker_id = value[0]
                if verbose:
                    print(" - Worker ID: [%s]" % (_worker_id))
            elif key == '_worker_exp':
                _worker_exp = int(value[0])
                if verbose:
                    print(" - Worker experience: [%d]" % (_worker_exp))
            elif key == '_gui_rating':
                # Rating may be non-numeric free text; map that to -1.
                _gui_rating = value[0]
                try:
                    _gui_rating = int(_gui_rating)
                except ValueError:
                    _gui_rating = -1
                if verbose:
                    print(" - GUI rating: [%d/10]" % (_gui_rating))
            elif key == '_hit_comment':
                _hit_comment = value[0]
                if verbose:
                    print(" - Assignment comment: [%s]" % (_hit_comment))
            elif key == '_hit_rt':
                _hit_rt = int(value[0])
                if verbose:
                    print(" - Assignment response time: [%d]" % (_hit_rt))
            elif key == '_hit_it':
                _hit_it = int(value[0])
                if verbose:
                    print(" - Assignment instruction time: [%d]" % (_hit_it))
            elif key == '_trials_results':
                _trials_results = value[0]
                if verbose:
                    print(" - Assignment results: [%s]" % (_trials_results))
            elif key == '_hit_depth_str':
                _hit_depth_str = value[0]
                if verbose:
                    print(" - Assignment depth string: [%s]" % (_hit_depth_str))
            elif key == '_hit_reject_flag':
                # Anything other than the literal string 'false' counts as True.
                _hit_reject_flag = value[0]
                if str(_hit_reject_flag) == 'false':
                    _hit_reject_flag = False
                else:
                    _hit_reject_flag = True
                if verbose:
                    print(" - Assignment reject flag: [%s]" % (str(_hit_reject_flag)))
            elif key == '_hit_flag':
                _hit_flag = value[0]
                if _hit_flag == 'Yes':
                    _hit_flag = True
                else:
                    _hit_flag = False
                if verbose:
                    print(" - Assignment information flag: [%s]" % (str(_hit_flag)))
            elif key == "_dataset":
                # NOTE(review): parsed but never stored in tmp_ass below —
                # preserved as-is; confirm whether it should be included.
                _dataset = value[0]
                if verbose:
                    print(" - Assignment dataset: [%s]" % (_dataset))
            else:
                print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
                print("ERROR: unknown key [%r]" % (key,))
                print("Relevant info:")
                # Fix: original referenced the undefined name `_assignment`
                # here, raising NameError instead of printing diagnostics.
                pprint(vars(ass))
                pprint(vars(question_form_answer))
                print("Exiting...")
                print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
                return

        tmp_ass                          = {}
        tmp_ass['_worker_id']            = _worker_id
        tmp_ass['_worker_exp']           = _worker_exp
        tmp_ass['_hit_id']               = _hit_id
        tmp_ass['_assignment_id']        = _assignment_id
        tmp_ass['_gui_rating']           = _gui_rating
        tmp_ass['_hit_comment']          = _hit_comment
        tmp_ass['_hit_rt']               = _hit_rt
        tmp_ass['_hit_it']               = _hit_it
        tmp_ass['_trials_results']       = _trials_results
        tmp_ass['_hit_depth_str']        = _hit_depth_str
        tmp_ass['_hit_reject_flag']      = _hit_reject_flag
        tmp_ass['_hit_flag']             = _hit_flag

        all_assignments_data.append( tmp_ass )

    return all_assignments_data
Example #32
0
 def updatePortfolio(self, contract, position, marketPrice, marketValue,
                     averageCost, unrealizedPNL, realizedPNL, accountName):
     # Portfolio-update callback; vars() with no args snapshots the local
     # namespace (i.e. all parameters, including self) for display.
     # NOTE(review): looks like an Interactive Brokers EWrapper callback —
     # confirm against the enclosing class (header not visible here).
     showmessage('updatePortfolio', vars())
Example #33
0
 def updateAccountValue(self, key, value, currency, accountName):
     # Account-value callback; vars() captures all arguments for display.
     showmessage('updateAccountValue', vars())
Example #34
0
 def openOrderEnd(self):
     # End-of-open-orders marker callback; vars() here is just {'self': self}.
     showmessage('openOrderEnd', vars())
Example #35
0
 def tickOptionComputation(self, tickerId, field, impliedVolatility, delta):
     # Option-computation tick callback; vars() captures all arguments.
     showmessage('tickOptionComputation', vars())
Example #36
0
 def tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints,
             impliedFuture, holdDays, futureExpiry, dividendImpact,
             dividendsToExpiry):
     # EFP (Exchange for Physical) tick callback; vars() captures all args.
     showmessage('tickEFP', vars())
Example #37
0
 def error(self, id=None, errorCode=None, errorMsg=None):
     # Error callback (Python 2: note the print statement).  `id` shadows the
     # builtin but cannot be renamed without breaking keyword callers.
     # NOTE(review): 2104 is treated as informational and printed directly
     # rather than routed through showmessage — presumably the IB "market
     # data farm connection is OK" status; confirm against the API docs.
     if errorCode == 2104:
         print "--> %s" % errorMsg
     else:
         showmessage('error', vars())
import requests
import cv2
import os

# construct the argument parse and parse the arguments
import argparse  # used below; local import is a no-op if already imported

ap = argparse.ArgumentParser()
ap.add_argument("-u",
                "--urls",
                required=True,
                help="path to folder containing file containing image URLs")
ap.add_argument("-o",
                "--output",
                required=True,
                help="path to output directory of directory of images")
ap.add_argument("-l", "--loop", type=int, default=-1, help="get all or not")
args = vars(ap.parse_args())

# READ FOLDER: every entry is expected to be a file listing image URLs
urllistlist = os.listdir(args["urls"])

# LOOP over each URL-list file
for urllist in urllistlist:

    # Name the per-list output directory after the file minus its 4-char
    # extension (e.g. "cats.txt" -> "cats").
    destination_path = os.path.sep.join([args["output"], urllist[:-4]])
    # CREATE DIRECTORY
    # Fix: the original wrapped os.stat in a bare `except:` which swallowed
    # every error (including KeyboardInterrupt); exist_ok handles the
    # already-exists case explicitly and is race-free.
    os.makedirs(destination_path, exist_ok=True)

    # PASS ARGUMENTS
# EXERCISE #40 - LIST 03 - REPETITION STRUCTURES
# Reads data for 5 cities and reports accident statistics.
print('\nEstatística das cidades')
print('#######################\n')
soma = 0    # total vehicles across all cities
soma_2 = 0  # accident total for cities with fewer than 2000 vehicles
# Fix: counter was initialised to 1, inflating the divisor of the
# "accidents in small cities" average by one.
a = 0       # count of cities with fewer than 2000 vehicles
for i in range(1, 6):
    código_da_cidade = float(input('Insira o código da cidade: '))
    número_de_veículos_da_cidade = int(input('Insira o número de veículos da cidade: '))
    número_de_acidentes_com_vítima = int(input('Insira o número de acidentes com vítima na cidade: '))
    if número_de_veículos_da_cidade < 2000:
        soma_2 += número_de_acidentes_com_vítima
        a += 1
    # Track the extremes; the `'name' not in vars()` test initialises the
    # running max/min on the first iteration (vars() is globals() here).
    if 'maior_índice' not in vars() or número_de_acidentes_com_vítima > maior_índice:
        maior_índice = número_de_acidentes_com_vítima
        código_da_cidade_com_maior_número_de_vítimas = código_da_cidade

    if 'menor_índice' not in vars() or número_de_acidentes_com_vítima < menor_índice:
        menor_índice = número_de_acidentes_com_vítima
        código_da_cidade_com_menor_número_de_vítimas = código_da_cidade
    soma += número_de_veículos_da_cidade

# Fix: guard against division by zero when no city has < 2000 vehicles
# (the original only avoided it by starting the counter at 1).
média_de_acidentes_cidades = soma_2 / a if a else 0
média = soma / 5
print('A cidade com maior índice de acidentes é a ', int(código_da_cidade_com_maior_número_de_vítimas), 'com',
      maior_índice, 'vítimas')
print('A cidade com menor índice de acidentes é a ', int(código_da_cidade_com_menor_número_de_vítimas), 'com',
      menor_índice, 'vítimas')
print('A média de véiculos por cidade é de: ', média, 'veículos')
print('A média de acidentes nas cidades com menos de 2000 véiculos é de: ', média_de_acidentes_cidades)
def main(args):
    """Train the segmentation network configured by *args* and checkpoint
    the best-mIoU weights.

    Side effects: creates args.snapshot_dir, writes TensorBoard logs under
    args.log_dir/args.method, and saves/renames model weights inside
    args.snapshot_dir.
    """
    # Echo the run configuration.
    print("Input arguments:")
    for key, val in vars(args).items():
        print("{:16} {}".format(key, val))

    if not os.path.exists(args.snapshot_dir):
        os.makedirs(args.snapshot_dir)
    writer = SummaryWriter(log_dir=os.path.join(args.log_dir, args.method))

    # Seeding + cudnn autotuner.
    random.seed(args.seed)
    torch.manual_seed(args.seed)
    cudnn.enabled = True
    cudnn.benchmark = True

    # conduct seg network
    seg_model = get_model(num_classes=args.num_classes)

    saved_state_dict = torch.load(args.restore_from)
    new_params = seg_model.state_dict().copy()

    if args.init:
        # Initialise the encoder from a classification checkpoint,
        # skipping the classifier's fc layer.
        for i in saved_state_dict:
            i_parts = i.split('.')
            if not i_parts[0] == 'fc':
                new_params['encoder.' + '.'.join(i_parts[:])] = saved_state_dict[i]
        seg_model.load_state_dict(new_params)
        print('loading params w/o fc')
    else:
        seg_model.load_state_dict(saved_state_dict)
        print('loading params all')

    model = DataParallelModel(seg_model)
    model.float()
    model.cuda()

    # define dataloader
    train_loader = data.DataLoader(DataGenerator(root=args.root, list_path=args.lst,
                                                 crop_size=args.crop_size, training=True),
                                   batch_size=args.batch_size, shuffle=True, num_workers=4, pin_memory=True)
    val_loader = data.DataLoader(DataGenerator(root=args.val_root, list_path=args.val_lst,
                                               crop_size=args.crop_size, training=False),
                                 batch_size=args.batch_size, shuffle=False, num_workers=4, pin_memory=True)

    # define criterion & optimizer
    criterion = ABRLovaszLoss(ignore_index=args.ignore_label, only_present=True,
                              cls_p=args.num_classes, cls_h=args.hbody_cls, cls_f=args.fbody_cls)
    criterion = DataParallelCriterion(criterion).cuda()

    optimizer = optim.SGD(
        [{'params': filter(lambda p: p.requires_grad, seg_model.parameters()), 'lr': args.learning_rate}],
        lr=args.learning_rate, momentum=0.9, weight_decay=5e-4)

    # Best-metric tracking; model_dir stays None until a checkpoint is written.
    best_val_mIoU = 0
    best_val_pixAcc = 0
    model_dir = None  # fix: was unbound if validation never improved on 0
    start = time.time()

    for epoch in range(0, args.epochs):
        print('\n{} | {}'.format(epoch, args.epochs - 1))
        # training
        _ = train(model, train_loader, epoch, criterion, optimizer, writer)

        # Validate every 10 epochs, and every epoch in the last 20% of training.
        if epoch % 10 == 0 or epoch > args.epochs * 0.8:
            val_pixacc, val_miou = validation(model, val_loader, epoch, writer)
            # save model on improvement
            if val_pixacc > best_val_pixAcc:
                best_val_pixAcc = val_pixacc
            if val_miou > best_val_mIoU:
                best_val_mIoU = val_miou
                model_dir = os.path.join(args.snapshot_dir, args.method + '_miou.pth')
                torch.save(seg_model.state_dict(), model_dir)
                print('Model saved to %s' % model_dir)

    # Fix: only rename when a checkpoint was actually written; the original
    # raised NameError on model_dir if no epoch ever improved the mIoU.
    if model_dir is not None:
        os.rename(model_dir, os.path.join(args.snapshot_dir, args.method + '_miou' + str(best_val_mIoU) + '.pth'))
    print('Complete using', time.time() - start, 'seconds')
    print('Best pixAcc: {} | Best mIoU: {}'.format(best_val_pixAcc, best_val_mIoU))
.. |cjc101| replace:: `CJC 101: Criminal Justice in a Global Context <http://www.luc.edu/criminaljustice/undergradcourses.shtml>`__

.. |cjc102| replace:: `CJC 102: The Criminal Justice System <http://www.luc.edu/criminaljustice/undergradcourses.shtml>`__

.. |cjc322| replace:: `CJC 322: Criminal Courts and Law <http://www.luc.edu/criminaljustice/undergradcourses.shtml>`__

.. |cjc323| replace:: `CJC 323: Criminal Procedure <http://www.luc.edu/criminaljustice/undergradcourses.shtml>`__

.. |pdf-version| replace:: `printable PDF  <https://github.com/LoyolaChicagoCS/coursedescriptions/releases/download/%(version)s/LoyolaComputerScienceAcademicPrograms.pdf>`__

.. |epub-version| replace:: `eBook <https://github.com/LoyolaChicagoCS/coursedescriptions/releases/download/%(version)s/LoyolaComputerScienceAcademicPrograms.epub>`__

.. |see-locus| replace:: The scheduling information you see here is an export from `LOCUS <https://locus.luc.edu>`__. LOCUS is the authoritative source of information for university course scheduling. What you see here is subject to change at any time.

""" % vars())

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "sphinx.ext.todo", "sphinx.ext.mathjax", "sphinx.ext.autosectionlabel"
def main_handler():
    """Entry point for the ZoneMinder event-notification detection hook.

    Parses CLI arguments, loads configuration and logging, runs ML detection
    on the event stream (locally or via a remote ml_gateway), then optionally
    writes annotated images, a JSON summary, updated ZM event notes and an
    animation, depending on configuration.
    """
    # set up logging to syslog
    # construct the argument parse and parse the arguments
    ap = argparse.ArgumentParser()
    ap.add_argument('-c', '--config', help='config file with path')
    ap.add_argument('-e', '--eventid', help='event ID to retrieve')
    ap.add_argument('-p',
                    '--eventpath',
                    help='path to store object image file',
                    default='')
    ap.add_argument('-m', '--monitorid', help='monitor id - needed for mask')
    ap.add_argument('-v',
                    '--version',
                    help='print version and quit',
                    action='store_true')

    ap.add_argument('-o', '--output-path',
                    help='internal testing use only - path for debug images to be written')

    ap.add_argument('-f',
                    '--file',
                    help='internal testing use only - skips event download')

    ap.add_argument('-r', '--reason', help='reason for event (notes field in ZM)')

    ap.add_argument('-n', '--notes', help='updates notes field in ZM with detections', action='store_true')
    ap.add_argument('-d', '--debug', help='enables debug on console', action='store_true')

    # parse_known_args: tolerate extra args the ES daemon may pass through
    args, u = ap.parse_known_args()
    args = vars(args)

    if args.get('version'):
        print('hooks:{} pyzm:{}'.format(hooks_version, pyzm_version))
        exit(0)

    if not args.get('config'):
        print('--config required')
        exit(1)

    # Either a downloaded event (--eventid) or a local test file (--file)
    # must be supplied.
    if not args.get('file') and not args.get('eventid'):
        print('--eventid required')
        exit(1)

    utils.get_pyzm_config(args)

    if args.get('debug'):
        g.config['pyzm_overrides']['dump_console'] = True
        g.config['pyzm_overrides']['log_debug'] = True
        g.config['pyzm_overrides']['log_level_debug'] = 5
        g.config['pyzm_overrides']['log_debug_target'] = None

    # Use a per-monitor logger name when a monitor id is known.
    if args.get('monitorid'):
        log.init(name='zmesdetect_' + 'm' + args.get('monitorid'), override=g.config['pyzm_overrides'])
    else:
        log.init(name='zmesdetect', override=g.config['pyzm_overrides'])
    g.logger = log

    # Best-effort read of the event server version; failure is non-fatal.
    # Fix: narrowed the bare `except:` which also swallowed KeyboardInterrupt.
    es_version = '(?)'
    try:
        es_version = subprocess.check_output(['/usr/bin/zmeventnotification.pl', '--version']).decode('ascii')
    except Exception:
        pass

    try:
        import cv2
    except ImportError as e:
        g.logger.Fatal(f'{e}: You might not have installed OpenCV as per install instructions. Remember, it is NOT automatically installed')

    g.logger.Info('---------| pyzm version:{}, hook version:{},  ES version:{} , OpenCV version:{}|------------'.format(pyzm_version, hooks_version, es_version, cv2.__version__))

    # load modules that depend on cv2
    try:
        import zmes_hook_helpers.image_manip as img
    except Exception as e:
        g.logger.Error(f'{e}')
        exit(1)
    g.polygons = []

    # process config file
    g.ctx = ssl.create_default_context()
    utils.process_config(args, g.ctx)

    # misc came later, so lets be safe
    if not os.path.exists(g.config['base_data_path'] + '/misc/'):
        try:
            os.makedirs(g.config['base_data_path'] + '/misc/')
        except FileExistsError:
            pass  # if two detects run together with a race here

    if not g.config['ml_gateway']:
        g.logger.Info('Importing local classes for Object/Face')
        import pyzm.ml.object as object_detection
    else:
        g.logger.Info('Importing remote shim classes for Object/Face')
        from zmes_hook_helpers.apigw import ObjectRemote, FaceRemote, AlprRemote

    # now download image(s)
    start = datetime.datetime.now()

    obj_json = []

    import pyzm.api as zmapi
    api_options = {
        'apiurl': g.config['api_portal'],
        'portalurl': g.config['portal'],
        'user': g.config['user'],
        'password': g.config['password'],
        'logger': g.logger,  # use none if you don't want to log to ZM,
        'disable_ssl_cert_check': False if g.config['allow_self_signed'] == 'no' else True
    }

    g.logger.Info('Connecting with ZM APIs')
    zmapi = zmapi.ZMApi(options=api_options)
    stream = args.get('eventid') or args.get('file')
    ml_options = {}
    stream_options = {}
    secrets = None

    # Build the ML pipeline options: either the new-style ml_sequence
    # (templated with secrets) or a mapping from the legacy config keys.
    if g.config['ml_sequence'] and g.config['use_sequence'] == 'yes':
        g.logger.Debug(2, 'using ml_sequence')
        ml_options = g.config['ml_sequence']
        secrets = pyzmutils.read_config(g.config['secrets'])
        ml_options = pyzmutils.template_fill(input_str=ml_options, config=None, secrets=secrets._sections.get('secrets'))
        ml_options = ast.literal_eval(ml_options)
        g.config['ml_sequence'] = ml_options
    else:
        g.logger.Debug(2, 'mapping legacy ml data from config')
        ml_options = utils.convert_config_to_ml_sequence()
        g.config['ml_sequence'] = ml_options

    if g.config['stream_sequence'] and g.config['use_sequence'] == 'yes':  # new sequence
        g.logger.Debug(2, 'using stream_sequence')
        stream_options = g.config['stream_sequence']
        stream_options = ast.literal_eval(stream_options)
    else:  # legacy
        g.logger.Debug(2, 'mapping legacy stream data from config')
        if g.config['detection_mode'] == 'all':
            g.config['detection_mode'] = 'most_models'
        frame_set = g.config['frame_id']
        if g.config['frame_id'] == 'bestmatch':
            if g.config['bestmatch_order'] == 's,a':
                frame_set = 'snapshot,alarm'
            else:
                frame_set = 'alarm,snapshot'
        stream_options['resize'] = int(g.config['resize']) if g.config['resize'] != 'no' else None

        stream_options['strategy'] = g.config['detection_mode']
        stream_options['frame_set'] = frame_set
        stream_options['disable_ssl_cert_check'] = False if g.config['allow_self_signed'] == 'no' else True

    # These are stream options that need to be set outside of supplied configs
    stream_options['api'] = zmapi
    stream_options['polygons'] = g.polygons
    g.config['stream_sequence'] = stream_options

    m = None
    matched_data = None
    all_data = None

    # Optional delay before inferencing (lets ZM finish writing frames).
    if not args['file'] and int(g.config['wait']) > 0:
        g.logger.Info('Sleeping for {} seconds before inferencing'.format(
            g.config['wait']))
        time.sleep(g.config['wait'])

    if g.config['ml_gateway']:
        # Remote detection via the ML gateway, with optional local fallback.
        stream_options['api'] = None
        stream_options['monitorid'] = args.get('monitorid')
        start = datetime.datetime.now()
        try:
            matched_data, all_data = remote_detect(stream=stream, options=stream_options, api=zmapi)
            diff_time = (datetime.datetime.now() - start)
            g.logger.Debug(1, 'Total remote detection detection took: {}'.format(diff_time))
        except Exception as e:
            g.logger.Error("Error with remote mlapi:{}".format(e))
            g.logger.Debug(2, traceback.format_exc())

            if g.config['ml_fallback_local'] == 'yes':
                g.logger.Debug(1, "Falling back to local detection")
                stream_options['api'] = zmapi
                from pyzm.ml.detect_sequence import DetectSequence
                m = DetectSequence(options=ml_options, logger=g.logger)
                matched_data, all_data = m.detect_stream(stream=stream, options=stream_options)
    else:
        from pyzm.ml.detect_sequence import DetectSequence
        m = DetectSequence(options=ml_options, logger=g.logger)
        matched_data, all_data = m.detect_stream(stream=stream, options=stream_options)

    # Fix: if remote detection failed and no local fallback ran, matched_data
    # is still None and every subscript below raised TypeError.
    if matched_data is None:
        g.logger.Error('No detection data returned, exiting')
        return

    # let's remove past detections first, if enabled
    if g.config['match_past_detections'] == 'yes' and args.get('monitorid'):
        # point detections to post processed data set
        g.logger.Info('Removing matches to past detections')
        bbox_t, label_t, conf_t = img.processPastDetection(
            matched_data['boxes'], matched_data['labels'], matched_data['confidences'], args.get('monitorid'))
        # save current objects for future comparisons
        g.logger.Debug(1,
            'Saving detections for monitor {} for future match'.format(
                args.get('monitorid')))
        try:
            mon_file = g.config['image_path'] + '/monitor-' + args.get(
                'monitorid') + '-data.pkl'
            # Fix: use a context manager so the file handle is not leaked
            # when pickling raises part-way through.
            with open(mon_file, "wb") as f:
                pickle.dump(matched_data['boxes'], f)
                pickle.dump(matched_data['labels'], f)
                pickle.dump(matched_data['confidences'], f)
        except Exception as e:
            g.logger.Error(f'Error writing to {mon_file}, past detections not recorded:{e}')

        matched_data['boxes'] = bbox_t
        matched_data['labels'] = label_t
        matched_data['confidences'] = conf_t

    obj_json = {
        'labels': matched_data['labels'],
        'boxes': matched_data['boxes'],
        'frame_id': matched_data['frame_id'],
        'confidences': matched_data['confidences'],
        'image_dimensions': matched_data['image_dimensions']
    }

    # Build the human-readable prediction string, one entry per unique label.
    detections = []
    seen = {}
    pred = ''
    prefix = ''

    if matched_data['frame_id'] == 'snapshot':
        prefix = '[s] '
    elif matched_data['frame_id'] == 'alarm':
        prefix = '[a] '
    else:
        prefix = '[x] '
    for idx, l in enumerate(matched_data['labels']):
        if l not in seen:
            if g.config['show_percent'] == 'no':
                pred = pred + l + ','
            else:
                pred = pred + l + ':{:.0%}'.format(matched_data['confidences'][idx]) + ' '
            seen[l] = 1

    if pred != '':
        pred = pred.rstrip(',')
        pred = prefix + 'detected:' + pred
        g.logger.Info('Prediction string:{}'.format(pred))
        jos = json.dumps(obj_json)
        g.logger.Debug(1, 'Prediction string JSON:{}'.format(jos))
        # NOTE(review): stdout appears to be parsed downstream on the
        # '--SPLIT--' token — do not change this format; confirm with the ES.
        print(pred + '--SPLIT--' + jos)

        if (matched_data['image'] is not None) and (g.config['write_image_to_zm'] == 'yes' or g.config['write_debug_image'] == 'yes'):
            debug_image = pyzmutils.draw_bbox(image=matched_data['image'], boxes=matched_data['boxes'],
                                              labels=matched_data['labels'], confidences=matched_data['confidences'],
                                              polygons=g.polygons, poly_thickness=g.config['poly_thickness'])

            if g.config['write_debug_image'] == 'yes':
                # Red rectangles mark boxes that were rejected during matching.
                for _b in matched_data['error_boxes']:
                    cv2.rectangle(debug_image, (_b[0], _b[1]), (_b[2], _b[3]),
                                  (0, 0, 255), 1)
                filename_debug = g.config['image_path'] + '/' + os.path.basename(append_suffix(stream, '-{}-debug'.format(matched_data['frame_id'])))
                g.logger.Debug(1, 'Writing bound boxes to debug image: {}'.format(filename_debug))
                cv2.imwrite(filename_debug, debug_image)

            if g.config['write_image_to_zm'] == 'yes' and args.get('eventpath'):
                g.logger.Debug(1, 'Writing detected image to {}/objdetect.jpg'.format(
                    args.get('eventpath')))
                cv2.imwrite(args.get('eventpath') + '/objdetect.jpg', debug_image)
                jf = args.get('eventpath') + '/objects.json'
                g.logger.Debug(1, 'Writing JSON output to {}'.format(jf))
                try:
                    with open(jf, 'w') as jo:
                        json.dump(obj_json, jo)
                except Exception as e:
                    g.logger.Error(f'Error creating {jf}:{e}')

        if args.get('notes'):
            # Update the ZM event notes field, preserving any existing
            # 'Motion:' suffix that ZM appended.
            url = '{}/events/{}.json'.format(g.config['api_portal'], args['eventid'])
            try:
                ev = zmapi._make_request(url=url, type='get')
            except Exception as e:
                g.logger.Error('Error during event notes retrieval: {}'.format(str(e)))
                g.logger.Debug(2, traceback.format_exc())
                exit(0)  # Let's continue with zmdetect

            new_notes = pred
            if ev.get('event', {}).get('Event', {}).get('Notes'):
                old_notes = ev['event']['Event']['Notes']
                old_notes_split = old_notes.split('Motion:')
                old_d = old_notes_split[0]  # old detection
                try:
                    old_m = old_notes_split[1]
                except IndexError:
                    old_m = ''
                new_notes = pred + 'Motion:' + old_m
                g.logger.Debug(1, 'Replacing old note:{} with new note:{}'.format(old_notes, new_notes))

            payload = {}
            payload['Event[Notes]'] = new_notes
            try:
                ev = zmapi._make_request(url=url, payload=payload, type='put')
            except Exception as e:
                g.logger.Error('Error during notes update: {}'.format(str(e)))
                g.logger.Debug(2, traceback.format_exc())

        if g.config['create_animation'] == 'yes':
            if not args.get('eventid'):
                g.logger.Error('Cannot create animation as you did not pass an event ID')
            else:
                g.logger.Debug(1, 'animation: Creating burst...')
                try:
                    img.createAnimation(matched_data['frame_id'], args.get('eventid'), args.get('eventpath') + '/objdetect', g.config['animation_types'])
                except Exception as e:
                    g.logger.Error('Error creating animation:{}'.format(e))
                    g.logger.Error('animation: Traceback:{}'.format(traceback.format_exc()))
Example #43
0
"""
The choice of whether or not to use lists or sets depends on intended use. Use a list if
you want to preserve the insertion order of the items. Use a set if you want to eliminate
duplicates (and don't care about the order).

To easily construct such dictionaries, you can use defaultdict in the collections
module. A feature of defaultdict is that it automatically initializes the first value so
that you can simply focus on adding items. For example:
"""
from collections import defaultdict

d = defaultdict(list)
d['a'].append(1)
d['a'].append(2)
d['b'].append(4)
print('d = {d}'.format_map(vars()))

d = defaultdict(set)
d['a'].add(1)
d['a'].add(2)
d['b'].add(4)
print('d = {d}'.format_map(vars()))
"""
One caution with defaultdict is that it will automatically create dictionary entries for
keys accessed later on (even if they aren't currently found in the dictionary). If you don't
want this behavior, you might use setdefault() on an ordinary dictionary instead. For
example:
"""
d = {}  # A regular dictionary
d.setdefault('a', []).append(1)
d.setdefault('a', []).append(2)
Example #44
0
    args = parser.parse_args()

    save_path = os.path.join("../psa_zbf/output/model_weights",
                             args.session_name)

    print("dloss weight", args.densecrfloss)
    critersion = torch.nn.CrossEntropyLoss(weight=None, ignore_index=255, reduction='elementwise_mean').cuda()
    DenseEnergyLosslayer = DenseEnergyLoss(weight=args.densecrfloss, sigma_rgb=args.sigma_rgb,
                                     sigma_xy=args.sigma_xy, scale_factor=args.rloss_scale)

    model = getattr(importlib.import_module(args.network), 'SegNet')()

    pyutils.Logger(args.session_name + '.log')

    print(vars(args))

    train_dataset = voc12.data.VOC12ClsDataset(args.train_list, voc12_root=args.voc12_root,
                                               transform=transforms.Compose([
                        imutils.RandomResizeLong(256, 512),
                        transforms.RandomHorizontalFlip(),
                        transforms.ColorJitter(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.1),
                        np.asarray]),
                        transform2=
                        imutils.Compose([imutils.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
                        imutils.RandomCrop(args.crop_size),
                        imutils.HWC_to_CHW]))

    train_data_loader = DataLoader(train_dataset, batch_size=args.batch_size,
                                   shuffle=True, num_workers=args.num_workers, pin_memory=True, drop_last=True)
Example #45
0
def generate_controller(compute_function,
                        classname,
                        outfile,
                        filename_template,
                        pool_function,
                        overwrite,
                        filename_model,
                        filename_forms=None,
                        filename_db_models=None,
                        app_file=None,
                        login=False):

    if not overwrite and outfile is not None and os.path.isfile(outfile):
        if not strtobool(
                raw_input("The file %s already exists. Overwrite? [Y/N]: " %
                          outfile)):
            return None

    compute_function_name = compute_function.__name__
    compute_function_file = compute_function.__module__

    if pool_function:
        pool_function_name = pool_function.__name__
        pool_function_file = pool_function.__module__
        pool = True
    else:
        pool = False

    import inspect
    arg_names = inspect.getargspec(compute_function).args
    defaults = inspect.getargspec(compute_function).defaults

    # Add code for file upload only if it is strictly needed
    file_upload = False

    if pool:
        # FIXME: This should be replaced by a good regex
        filetxt = ("widget='file'", 'widget="file"', "widget = 'file'",
                   'widget = "file"')
        pooltxt = open(pool_function_file + ".py", 'r').read()
        for txt in filetxt:
            if txt in pooltxt:
                file_upload = True
                break
    else:
        for name in arg_names:
            if 'filename' in name:
                file_upload = True
                break

    if login:
        forms_module = filename_forms.replace('.py', '')
        db_models_module = filename_db_models.replace('.py', '')
        app_module = app_file.replace('.py', '')
    else:
        pass
    # Should be in the else-block, but that will lead
    # to an error in the comment on line 65.
    # See the TODO on line 67.
    model_module = filename_model.replace('.py', '')

    code = '''\
import os
from %(compute_function_file)s import %(compute_function_name)s as compute_function
''' % vars()
    if pool:
        code += '''
# Pool object (must be imported before %(model_module)s)
# AEJ: Why? With login we don't even have this file.
# TODO: Find out the reason for this order of imports.
from %(pool_function_file)s import %(pool_function_name)s as pool_function
pool = pool_function()

# Can define other default values in a file: --poolfile name
from parampool.pool.UI import set_defaults_from_file
pool = set_defaults_from_file(pool)
# Can override default values on the command line
from parampool.pool.UI import set_values_from_command_line
pool = set_values_from_command_line(pool)
''' % vars()
    code += '''
from flask import Flask, render_template, request'''
    if file_upload:
        code += ', session'
    if login:
        code += ', redirect, url_for'
        code += '''
from %(forms_module)s import %(classname)sForm
from %(db_models_module)s import db, User, %(classname)s
from flask.ext.login import LoginManager, current_user, login_user, logout_user, login_required
from %(app_module)s import app
''' % vars()
    else:
        code += '''
from %(model_module)s import %(classname)s
''' % vars()

    if file_upload:
        code += '''\
from werkzeug import secure_filename
'''

    if login:
        if file_upload:
            code += '''
# Allowed file types for file upload
ALLOWED_EXTENSIONS = set(['txt', 'dat', 'npy'])

# Relative path of folder for uploaded files
UPLOAD_DIR = 'uploads/'
app.config['UPLOAD_FOLDER'] = UPLOAD_DIR
if not os.path.isdir(UPLOAD_DIR):
    os.mkdir(UPLOAD_DIR)

def allowed_file(filename):
    """Does filename have the right extension?"""
    return '.' in filename and filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
'''

        code += '''
login_manager = LoginManager()
login_manager.init_app(app)

@login_manager.user_loader
def load_user(user_id):
    return db.session.query(User).get(user_id)

def is_anonymous(user):
    return bool((isinstance(user.is_anonymous, bool) and
                 user.is_anonymous) or \
                (callable(user.is_anonymous) and user.is_anonymous()))

def is_authenticated(user):
    return bool((isinstance(user.is_authenticated, bool) and
                 user.is_authenticated) or \
                (callable(user.is_authenticated) and user.is_authenticated()))

# Path to the web application
@app.route('/', methods=['GET', 'POST'])
def index():
    result = None
    user = current_user
    form = %(classname)sForm(request.form)
    if request.method == "POST":''' % vars()

        if file_upload:
            code += '''
        if request.files:'''
            if pool:
                code += '''
            for name, file in request.files.iteritems():
                if allowed_file(file.filename):
                    filename = secure_filename(file.filename)
                    file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                    pool.set_value(name, filename)
                else:
                    raise TypeError("Illegal filename")
'''
            else:
                code += '''
            file = request.files[form.filename.name]
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                session["filename"] = filename
            else:
                session["filename"] = None

        else:
            session["filename"] = None
'''
        else:
            code += '''
        if form.validate():
'''
        if pool:
            code += '''
            # Send data to Pool object
            for field in form:
                if field.name not in request.files:
                    name = field.description
                    value = field.data
                    pool.set_value(name, value)

            result = compute(pool)
            if is_authenticated(user):
                object = %(classname)s()
                form.populate_obj(object)
                object.result = result
                object.user = user
''' % vars()
            if file_upload:
                code += '''\
                for name, file in request.files.iteritems():
                    setattr(object, name, pool.get(name).get_value())
'''
            code += '''\
                db.session.add(object)
                db.session.commit()

                # Send email notification
                if user.notify and user.email:
                    send_email(user)
'''
        else:
            if file_upload:
                code += '''
        result = compute(form)

        if is_authenticated(user):
            object = %(classname)s()
            form.populate_obj(object)
            object.result = result
            object.user = user
            object.filename = session["filename"]
            db.session.add(object)
            db.session.commit()

            # Send email notification
            if user.notify and user.email:
                send_email(user)
''' % vars()
            else:
                code += '''

            result = compute(form)
            if is_authenticated(user):
                object = %(classname)s()
                form.populate_obj(object)
                object.result = result
                object.user = user
                db.session.add(object)
                db.session.commit()

                # Send email notification
                if user.notify and user.email:
                    send_email(user)
''' % vars()
        code += '''
    else:
        if is_authenticated(user):
            if user.%(classname)s.count() > 0:
                instance = user.%(classname)s.order_by('-id').first()
                result = instance.result
                form = populate_form_from_instance(instance)

    return render_template("%(filename_template)s", form=form, result=result,
                           user=user, is_anonymous=is_anonymous(user))
''' % vars()

    else:
        code += '''
# Application object
app = Flask(__name__)
''' % vars()

        if file_upload:
            code += '''
# Allowed file types for file upload
ALLOWED_EXTENSIONS = set(['txt', 'dat', 'npy'])

# Relative path of folder for uploaded files
UPLOAD_DIR = 'uploads/'

app.config['UPLOAD_FOLDER'] = UPLOAD_DIR
app.secret_key = 'MySecretKey'

if not os.path.isdir(UPLOAD_DIR):
    os.mkdir(UPLOAD_DIR)

def allowed_file(filename):
    return '.' in filename and filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
''' % vars()

        code += '''
# Path to the web application
@app.route('/', methods=['GET', 'POST'])
def index():
    form = %(classname)s(request.form)
    if request.method == 'POST' and form.validate():
''' % vars()
        if file_upload:
            # Need to write custom validation for files
            code = code.replace(" and form.validate()", "")
            code += '''
        # Save uploaded file if it exists and is valid
        if request.files:'''
            if pool:
                code += '''
            for name, file in request.files.iteritems():
                if allowed_file(file.filename):
                    filename = secure_filename(file.filename)
                    file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                    pool.set_value(name, filename)
                else:
                    raise TypeError("Illegal filename")
'''
            else:
                code += '''
            file = request.files[form.filename.name]
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                session["filename"] = filename
            else:
                session["filename"] = None
        else:
            session["filename"] = None
'''

        if pool:
            code += '''
        # Send data to Pool object
        for field in form:
            if field.name not in request.files:
                name = field.description
                value = field.data
                data_item = pool.set_value(name, value)

        result = compute(pool)
'''
        else:
            code += '''
        result = compute(form)
'''
        code += '''\

    else:
        result = None

    return render_template("%(filename_template)s", form=form, result=result)

''' % vars()

    if pool:
        code += '''
def compute(pool):
    """
    Generic function for calling compute_function with values
    taken from the pool object.
    Return the output from the compute_function.
    """

    # compute_function must have only one positional argument
    # named pool
    import inspect
    arg_names = inspect.getargspec(compute_function).args
    if len(arg_names) == 1 and arg_names[0] == "pool":
        result = compute_function(pool)
    else:
        raise TypeError('%s(%s) can only have one argument named "pool"'
                        % (compute_function.__name__, ', '.join(arg_names)))
    return result
'''
    else:
        code += '''
def compute(form):
    """
    Generic function for compute_function with arguments
    taken from a form object (wtforms.Form subclass).
    Return the output from the compute_function.
    """
    # Extract arguments to the compute function
    import inspect
    arg_names = inspect.getargspec(compute_function).args

    # Extract values from form
    form_values = [getattr(form, name) for name in arg_names
                   if hasattr(form, name)]
''' % vars()

        if not file_upload:
            code += '''
    form_data = [value.data for value in form_values]
'''
        else:
            code += '''
    import wtforms
    form_data = []
    for value in form_values:
        if not isinstance(value, wtforms.fields.simple.FileField):
            form_data.append(value.data)
        else:
            form_data.append(session["filename"])
'''

        # Insert helper code if positional
        # arguments because the user must then convert form_data
        # elements explicitly.
        if not defaults or len(defaults) != len(arg_names):
            # Insert example on argument conversion since there are
            # positional arguments where default_field might be the
            # wrong type
            code += '''
    # Convert data to right types (if necessary)
    # for i in range(len(form_data)):
    #    name = arg_names[i]
    #    if name == '...':
    #         form_data[i] = int(form_data[i])
    #    elif name == '...':
'''
        else:
            # We have default values: do right conversions
            code += '''
    defaults  = inspect.getargspec(compute_function).defaults

    # Make defaults as long as arg_names so we can traverse both with zip
    if defaults:
        defaults = ["none"]*(len(arg_names)-len(defaults)) + list(defaults)
    else:
        defaults = ["none"]*len(arg_names)

    # Convert form data to the right type:
    import numpy
    for i in range(len(form_data)):
        if defaults[i] != "none":
            if isinstance(defaults[i], (str,bool,int,float)):
                pass  # special widgets for these types do the conversion
            elif isinstance(defaults[i], numpy.ndarray):
                form_data[i] = numpy.array(eval(form_data[i]))
            elif defaults[i] is None:
                if form_data[i] == 'None':
                    form_data[i] = None
                else:
                    try:
                        # Try eval if it succeeds...
                        form_data[i] = eval(form_data[i])
                    except:
                        pass # Just keep the text
            else:
                # Use eval to convert to right type (hopefully)
                try:
                    form_data[i] = eval(form_data[i])
                except:
                    print 'Could not convert text %s to %s for argument %s' % (form_data[i], type(defaults[i]), arg_names[i])
                    print 'when calling the compute function...'
'''
        code += '''
    # Run computations
    result = compute_function(*form_data)
    return result
'''
    if login:
        code += '''
def populate_form_from_instance(instance):
    """Repopulate form with previous values"""
    form = %(classname)sForm()
    for field in form:
        field.data = getattr(instance, field.name)
    return form

#app.config['MAIL_SERVER'] = 'smtp.gmail.com'

def send_email(user):
    from flask.ext.mail import Mail, Message
    mail = Mail(app)
    msg = Message("%(classname)s Computations Complete",
                  recipients=[user.email])
    msg.body = """\
A simulation has been completed by the Flask %(classname)s app. Please log in at

http://localhost:5000/login

to see the results.

---
This email has been automatically generated by the %(classname)s app created by
Parampool. If you don't want email notifications when a result is found, please
register a new user and leave the 'notify' field unchecked."""
    mail.send(msg)

@app.route('/reg', methods=['GET', 'POST'])
def create_login():
    from %(forms_module)s import register_form
    form = register_form(request.form)
    if request.method == 'POST' and form.validate():
        user = User()
        form.populate_obj(user)
        user.set_password(form.password.data)

        db.session.add(user)
        db.session.commit()

        login_user(user)
        return redirect(url_for('index'))
    return render_template("reg.html", form=form)

@app.route('/login', methods=['GET', 'POST'])
def login():
    from %(forms_module)s import login_form
    form = login_form(request.form)
    if request.method == 'POST' and form.validate():
        user = form.get_user()
        login_user(user)
        return redirect(url_for('index'))
    return render_template("login.html", form=form)

@app.route('/logout')
@login_required
def logout():
    logout_user()
    return redirect(url_for('index'))

@app.route('/old')
@login_required
def old():
    data = []
    user = current_user
    if is_authenticated(user):
        instances = user.%(classname)s.order_by('-id').all()
        for instance in instances:
            form = populate_form_from_instance(instance)

            result = instance.result
            if instance.comments:
                result += "<h3>Comments</h3>" + instance.comments
            data.append({'form':form, 'result':result, 'id':instance.id})

    return render_template("old.html", data=data)

@app.route('/add_comment', methods=['GET', 'POST'])
@login_required
def add_comment():
    user = current_user
    if request.method == 'POST' and is_authenticated(user):
        instance = user.%(classname)s.order_by('-id').first()
        instance.comments = request.form.get("comments", None)
        db.session.commit()
    return redirect(url_for('index'))

@app.route('/delete/<id>', methods=['GET', 'POST'])
@login_required
def delete_post(id):
    id = int(id)
    user = current_user
    if is_authenticated(user):
        if id == -1:
            instances = user.%(classname)s.delete()
        else:
            try:
                instance = user.%(classname)s.filter_by(id=id).first()
                db.session.delete(instance)
            except:
                pass

        db.session.commit()
    return redirect(url_for('old'))

if __name__ == '__main__':
    if not os.path.isfile(os.path.join(os.path.dirname(__file__), 'sqlite.db')):
        db.create_all()
    app.run(debug=True)
''' % vars()
    else:
        code += '''
if __name__ == '__main__':
    app.run(debug=True)
''' % vars()

    if pool:
        code += """
    from parampool.pool.UI import write_poolfile
    write_poolfile(pool, '.tmp_pool.dat')
"""

    if outfile is None:
        return code
    else:
        f = open(outfile, 'w')
        f.write(code)
        f.close()
        print "Flask main application written to %s." % outfile
        print "Usage: python %s" % outfile

    if login:
        app = open(app_file, 'w')
        import base64
        password = base64.encodestring('DifficultPW!').strip()
        app.write("""\
import os
from flask import Flask

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///sqlite.db'
app.secret_key = os.urandom(24)

# Email settings
import base64
app.config.update(
        MAIL_SERVER='smtp.gmail.com',
        MAIL_PORT=587,
        MAIL_USE_TLS=True,
        MAIL_USERNAME = '******',
        MAIL_PASSWORD = base64.decodestring('%(password)s'),
        MAIL_DEFAULT_SENDER = 'Flask %(classname)s Email <*****@*****.**>'
        )""" % vars())
        app.close()
Example #46
0
    yield "C"


# Enumerate the generator G (defined above — truncated in this view):
# produces (index, yielded value) pairs.
print list(enumerate(G()))

# next() with a second argument returns that default instead of raising
# StopIteration when the iterator is exhausted.
print next(iter([]), "default")
print next(iter([]), None)
print next(iter([1]), "default")


class C(object):
    """Minimal object exposing a single public instance attribute ``a``.

    Used below to demonstrate ``vars()`` on an instance.
    """

    def __init__(self):
        # vars(instance) will therefore be exactly {'a': 1}.
        self.a = 1


# vars() on an instance exposes its __dict__; items() lists the pairs.
print vars(C()).items()

# globals().get behaves like dict.get: returns None (or the supplied
# default) for names not defined at module level.
print globals().get("not a real variable")
print globals().get("not a real variable", 1)

# hex()/oct() formatting of integers (Python 2 semantics).
print hex(12345)
print oct(234)
print hex(0)
print oct(
    0
)  # This should not add an additional leading 0, ie should return "0" not "00"

# hex() rejects non-integers with a TypeError (Python 2 except syntax).
try:
    print hex([])
except TypeError, e:
    print e
Example #47
0
 def __init__(self):
     """Set up the evaluation namespace from the ``math`` module.

     Every public name from ``math`` becomes a variable, the builtins
     ``max``, ``min`` and ``abs`` are added on top, and ``env`` starts
     out empty.
     """
     super().__init__()
     namespace = {}
     for name, value in vars(math).items():
         if not name.startswith("_"):
             namespace[name] = value
     namespace.update(max=max, min=min, abs=abs)
     self.variables = namespace
     self.env = {}
Example #48
0

if __name__ == "__main__":
    def _parse_bool(text):
        """Interpret a CLI string as a boolean.

        BUG FIX: the original used ``type=bool``, which treats any
        non-empty string as True — so ``--cuda False`` yielded True.
        """
        return text.strip().lower() in ("1", "true", "yes", "y")

    # Command-line options for the training run; vars() turns the parsed
    # namespace into the plain dict expected by main().
    OPTIONS = argparse.ArgumentParser()
    OPTIONS.add_argument('--emotion',
                         dest='emotion',
                         type=str,
                         default='happy')
    OPTIONS.add_argument('--epochs', dest='epochs', type=int, default=100)
    OPTIONS.add_argument('--output_dim',
                         dest='output_dim',
                         type=int,
                         default=2)
    OPTIONS.add_argument('--patience', dest='patience', type=int, default=20)
    # NOTE(review): 'signiture' is a typo for 'signature', but the key is
    # part of the PARAMS interface consumed by main() — left unchanged.
    OPTIONS.add_argument('--signiture', dest='signiture', type=str, default='')
    OPTIONS.add_argument('--cuda', dest='cuda', type=_parse_bool, default=False)
    OPTIONS.add_argument('--data_path',
                         dest='data_path',
                         type=str,
                         default='directory/to/data/')
    OPTIONS.add_argument('--model_path',
                         dest='model_path',
                         type=str,
                         default='models')
    OPTIONS.add_argument('--output_path',
                         dest='output_path',
                         type=str,
                         default='results')
    PARAMS = vars(OPTIONS.parse_args())
    main(PARAMS)
Example #49
0
#!/usr/bin/env python3

import argparse
import pickle
import shelve


# BUG FIX: the descriptive text was being passed as ``prog`` (the program
# name shown in usage lines) with an empty ``description``; swap them so
# --help renders sensibly.
parser = argparse.ArgumentParser(
        description='Displays information regarding training or data'
        )

parser.add_argument('-path',
                    default='./train_data/readme',
                    # BUG FIX: help previously claimed the default was
                    # ./learning_data/readme, contradicting ``default``.
                    help='location of file, default: ./train_data/readme')

args = parser.parse_args()
path = str(vars(args)['path'])

print('--------------------------------' + '\n' + 'README INFORMATION')
print('--------------------------------')
# shelve.open used as a context manager closes the shelf on exit, so the
# redundant db.close() that followed this block has been removed.
with shelve.open(path) as db:
    for key in db:
        print(str(key) + ': ' + str(db[key]))
print('--------------------------------')
    # Add misc.
    all_devices.append('\\DEVICE\\HGFS\\VMWARE-HOST\\SHARED FOLDERS\\')

    for device in all_devices:
        yield device

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Calculates Prefetch hashes based on a given file/kernel path.')
    parser.add_argument('-i', '--input', action='store', help="Path to calculate hash on or path to a file containing multiple paths (one per line). \
    Paths can be in 3 forms:\
    'Users\User\AppData\Local\Temp\svchost.exe', \
    'Users\User\AppData\Local\Temp\svchost.exe', or \
    '\device\harddiskvolume1\Users\User\AppData\Local\Temp\svchost.exe'", required=True)
    parser.add_argument('-b', '--brute_force', action='store_true', help='Iterate possible device paths when calculating Prefetch path hashes')
    args = vars(parser.parse_args())

    path = args['input']
    brute_force = args['brute_force']

    if os.path.exists(path):
        with open(path, 'r') as inny:
            for line in inny:
                if brute_force:
                    for possible_device_path in iterate_devices():
                        prefetch_hasher = Hasher(filepath=line, device=possible_device_path)
                        print(json.dumps(prefetch_hasher.calculate_all(), indent=4, sort_keys=True))
                else:
                    prefetch_hasher = Hasher(line)
                    print(json.dumps(prefetch_hasher.calculate_all(), indent=4, sort_keys=True))
    else:
Example #51
0
from marshmallow import Schema, fields, ValidationError, validates_schema


class S(Schema):
    """Schema holding a raw ``state`` payload, with a status guard."""

    state = fields.Raw()

    @validates_schema
    def validate_analysis_status(self, data):
        # Reject payloads whose analysis is still waiting; payloads with
        # no "state" key pass through untouched.
        if "state" in data and data["state"].get("status") == "waiting":
            raise ValidationError('analysis status must be after processing status', field_names=["status"])

# A payload whose state.status is "waiting" trips the schema validator
# above; vars(e) dumps the raised ValidationError's attributes.
try:
    print(S(strict=True).load({"state": {"status": "waiting"}}))
except Exception as e:
    print(vars(e))
__author__ = 'JunSong<*****@*****.**>'
import argparse, base64, demo_paramiko

def main(config):
    """Connect to the demo host over SSH, list files, then add a user.

    NOTE(review): ``config`` is currently unused by this function —
    confirm whether it was meant to supply the host or credentials.
    """
    ssh = demo_paramiko.SSHClient()
    ssh.set_missing_host_key_policy(demo_paramiko.AutoAddPolicy())
    ssh.connect('10.214.155.238', username='******', password='******')

    # First command: list the remote home directory.
    _, ls_out, _ = ssh.exec_command('ls')
    print(ls_out.readlines())

    # Second command: create a user and show both output streams.
    _, add_out, add_err = ssh.exec_command('adduser sj1')
    print('stdout:\n%s\nstderr:\n%s\n' % (add_out.read(), add_err.read()))

    ssh.close()


if __name__ == "__main__":
    # Tiny CLI: one optional -f/--file argument, handed to main() as a dict.
    cli = argparse.ArgumentParser()
    cli.add_argument('-f', '--file', dest='file', type=str, default='example.txt')
    parsed = cli.parse_args()
    main(vars(parsed))


# Stand-alone demo: key-based SSH login, then list /home.
# NOTE(review): this runs at import time (no __main__ guard) — confirm
# that side effect is intended.
import demo_paramiko
client = demo_paramiko.SSHClient()
client.set_missing_host_key_policy(demo_paramiko.AutoAddPolicy())
client.connect('10.15.82.182', username='******', key_filename='/export/home/udms/.ssh/id_rsa')
stdin, stdout, stderr = client.exec_command('ls /home')
print(stdout.readlines())
Example #53
0
wrapper.  This module replaces the SWIG generated shadow classes.
It also provides iterator objects for efficient node iteration. 

Keith Dart <*****@*****.**>
"""

import sys

import _libsmi

from pycopia.aid import Enum

# Re-export the wrapped library's exception type at module level.
SmiError = _libsmi.SmiError

# Copy libsmi constants and enums into this module's namespace:
# integer SMI_* values are wrapped as Enum (value plus symbolic name),
# everything else is copied through untouched.
for name, value in vars(_libsmi).items():
    if name.startswith("SMI_"):
        if type(value) is int:
            setattr(sys.modules[__name__], name, Enum(value,name))
        else:
            setattr(sys.modules[__name__], name, value)

class IndexObjects(list):
    """A list of index objects that also records an IMPLIED flag.

    The flag is normalised to a real bool so that ``repr`` round-trips
    as a constructor call.
    """

    def __init__(self, init=None, implied=False):
        super(IndexObjects, self).__init__(init or [])
        self.implied = bool(implied)

    def __repr__(self):
        klass = self.__class__
        items = super(IndexObjects, self).__repr__()
        return "%s.%s(%s, %r)" % (klass.__module__, klass.__name__, items, self.implied)
Example #54
0
    # 1000000 = 16 min
    node = myNode(i, avgSendTime,20)
    nodes.append(node)
    env.process(transmit(env,node))

# Show the node-placement plot when graphics are enabled.
if (graphics == 1):
    plt.xlim([0, xmax])
    plt.ylim([0, ymax])
    plt.draw()
    plt.show()

# Persist node and base-station locations, one "x y id" line each.
with open('nodes.txt', 'w') as nfile:
    for node in nodes:
        nfile.write('{x} {y} {id}\n'.format(**vars(node)))

with open('basestation.txt', 'w') as bfile:
    for basestation in bs:
        bfile.write('{x} {y} {id}\n'.format(**vars(basestation)))

# Run the discrete-event simulation until the configured end time.
env.run(until=simtime)

# Print packet statistics gathered during the run.
# print "nrCollisions ", nrCollisions
# print list of received packets
#print recPackets
print ("nr received packets", len(recPackets))
print ("nr collided packets", len(collidedPackets))
print ("nr lost packets", len(lostPackets))
Example #55
0
def test_classification():
    """Regression test: Classification statistics on balanced vs skewed data.

    Both fixtures share the population rates {True: 0.05, False: 0.95};
    the literal numbers asserted below are exact values captured from a
    known-good run, so they must not be edited casually.
    """
    # Fit one model on a class-balanced sample and one on a skewed sample.
    balanced_stats = Classification(prediction_key="prediction",
                                    decision_key="probability",
                                    labels=LABELS,
                                    population_rates={
                                        True: 0.05,
                                        False: 0.95
                                    })
    balanced_stats.fit(BALANCED_PROBA)
    skewed_stats = Classification(prediction_key="prediction",
                                  decision_key="probability",
                                  labels=LABELS,
                                  population_rates={
                                      True: 0.05,
                                      False: 0.95
                                  })
    skewed_stats.fit(SKEWED_PROBA)

    # Smoke-test both formatting paths (str and JSON) for both fittings.
    skewed_stats.format_str({})
    json.dumps(skewed_stats.format_json({}), indent=2)
    balanced_stats.format_str({})
    json.dumps(balanced_stats.format_json({}), indent=2)

    # Raw counts.
    skewed_doc = skewed_stats.format_json({"counts": {}})['counts']
    eq_(skewed_doc['n'], 2000)
    eq_(skewed_doc['labels'][True], 51)
    eq_(skewed_doc['predictions'][True][True], 46)
    balanced_doc = balanced_stats.format_json({"counts": {}})['counts']
    eq_(balanced_doc['n'], 2000)
    eq_(balanced_doc['labels'][True], 1000)
    eq_(balanced_doc['predictions'][True][True], 900)

    # Sample rates differ; population rates are shared by construction.
    skewed_doc = skewed_stats.format_json({"rates": {}})['rates']
    eq_(skewed_doc['sample'][True], 0.026)
    eq_(skewed_doc['population'][True], 0.05)
    balanced_doc = balanced_stats.format_json({"rates": {}})['rates']
    eq_(balanced_doc['sample'][True], 0.5)
    eq_(balanced_doc['population'][True], 0.05)

    # ROC AUC: exact on the skewed fit, tolerance-compared across fits.
    skewed_doc = skewed_stats.format_json({"roc_auc": {}})['roc_auc']
    eq_(skewed_doc['micro'], 0.952)
    eq_(skewed_doc['macro'], 0.946)
    balanced_doc = balanced_stats.format_json({"roc_auc": {}})['roc_auc']
    assert abs(balanced_doc['micro'] - skewed_doc['micro']) < 0.025, \
           str(abs(balanced_doc['micro'] - skewed_doc['micro']))
    assert abs(balanced_doc['macro'] - skewed_doc['macro']) < 0.025, \
           str(abs(balanced_doc['macro'] - skewed_doc['macro']))

    # PR AUC, same pattern.
    skewed_doc = skewed_stats.format_json({"pr_auc": {}})['pr_auc']
    eq_(skewed_doc['micro'], 0.988)
    eq_(skewed_doc['macro'], 0.933)
    balanced_doc = balanced_stats.format_json({"pr_auc": {}})['pr_auc']
    assert abs(balanced_doc['micro'] - skewed_doc['micro']) < 0.025, \
           str(abs(balanced_doc['micro'] - skewed_doc['micro']))
    assert abs(balanced_doc['macro'] - skewed_doc['macro']) < 0.025, \
           str(abs(balanced_doc['macro'] - skewed_doc['macro']))

    # Accuracy, same pattern.
    skewed_doc = skewed_stats.format_json({"accuracy": {}})['accuracy']
    eq_(skewed_doc['micro'], 0.975)
    eq_(skewed_doc['macro'], 0.975)
    balanced_doc = balanced_stats.format_json({"accuracy": {}})['accuracy']
    assert abs(balanced_doc['micro'] - skewed_doc['micro']) < 0.025, \
           str(abs(balanced_doc['micro'] - skewed_doc['micro']))
    assert abs(balanced_doc['macro'] - skewed_doc['macro']) < 0.025, \
           str(abs(balanced_doc['macro'] - skewed_doc['macro']))

    # Dotted-path lookup API must agree with the formatted output.
    assert abs(skewed_stats.lookup('accuracy.micro') - 0.975) < 0.025, \
           str(skewed_stats.lookup('accuracy.micro') - 0.975)

    assert abs(skewed_stats.lookup('roc_auc.micro') - 0.975) < 0.025, \
           str(skewed_stats.lookup('roc_auc.micro') - 0.975)

    # Quoted lookup keys select threshold-optimised statistics.
    optimized = skewed_stats.lookup(
        '"maximum recall @ precision >= 0.9".labels.true')
    assert abs(optimized - 0.51) < 0.025, \
           str(optimized - 0.51)

    # The stats object must remain introspectable and picklable.
    print(vars(skewed_stats))
    pickle.loads(pickle.dumps(skewed_stats))
Example #56
0
 def get_attributes(self):
     """Yield the names of all ``property`` attributes on this class."""
     class_namespace = vars(self.__class__)
     for attr_name in class_namespace:
         # Exact type check (not isinstance), matching plain properties only.
         if type(class_namespace[attr_name]) is property:
             yield attr_name
Example #57
0
def obj_to_json(obj):
    """Serialise *obj* as a JSON string keyed by its dotted class name.

    :param obj: a Python object with a ``__dict__``
    :returns: a JSON string
    """
    dotted_name = cls2dotname(obj.__class__)
    return dumps({dotted_name: vars(obj)})
def _set_args():
    """
    This function sets the parameters provided by the user
    """
    parser = argparse.ArgumentParser(
        description='BUSCO plot generation tool.\n'
        'Place all BUSCO short summary files (short_summary.[generic|specific].dataset.label.txt) in a single folder. '
        'It will be '
        'your working directory, in which the generated plot files'
        ' will be written'
        '\nSee also the user guide'
        ' for additional information',
        usage=
        'python3 generate_plot.py -wd [WORKING_DIRECTORY] [OTHER OPTIONS]',
        formatter_class=RawTextHelpFormatter,
        add_help=False)

    required = parser.add_argument_group('required arguments')
    optional = parser.add_argument_group('optional arguments')

    required.add_argument('-wd',
                          '--working_directory',
                          metavar='PATH',
                          required=True,
                          dest='working_directory',
                          help='Define the location of your working directory')
    optional.add_argument(
        '-rt',
        '--run_type',
        required=False,
        dest='run_type',
        help='type of summary to use, `generic` or `specific`')
    optional.add_argument(
        '--no_r',
        help=
        'To avoid to run R. It will just create the R script file in the working directory',
        action="store_true",
        dest='no_r')
    optional.add_argument('-q',
                          '--quiet',
                          help='Disable the info logs, displays only errors',
                          action="store_true",
                          dest='quiet')
    optional.add_argument('-h',
                          '--help',
                          action="help",
                          help="Show this help message and exit")
    args = vars(parser.parse_args())
    if args["quiet"]:
        _logger.setLevel(logging.ERROR)
    if args["no_r"]:
        global _no_r
        _no_r = True
    global _plot_dir
    _plot_dir = args["working_directory"]
    if _plot_dir[-1] != '/':
        _plot_dir += '/'
    global _run_type
    _run_type = '*'
    if args["run_type"]:
        _run_type = args["run_type"]
Example #59
0
 def to_dict(self):
     """Return the public instance attributes as a plain dictionary.

     Skips the ``array`` attribute and anything whose name starts
     with an underscore.
     """
     public = {}
     for attr_name, attr_value in vars(self).items():
         if attr_name == 'array' or attr_name.startswith('_'):
             continue
         public[attr_name] = attr_value
     return public
Example #60
0
 def to_dict(self):
     """Return this object's attributes as a dict, with targets expanded.

     BUG FIX: ``vars(self)`` returns the instance's *live* ``__dict__``,
     so assigning ``object_dict["targets"]`` used to inject a ``targets``
     attribute onto the instance itself; a shallow copy avoids that
     side effect.
     """
     object_dict = dict(vars(self))
     object_dict["targets"] = [vars(each) for each in self.get_targets()]
     return object_dict