Example #1
    def _load_next(self):
        try:
            try:
                res = self.iterator.next()
                if not isinstance(res, tuple):
                    msg = "Expected tuple (signal, timestamp, value), obtained %s" % describe_type(res)
                    raise ValueError(msg)
                if len(res) != 3:
                    raise ValueError("Expected a tuple of length 3; obtained length %d." % len(res))
                signal, timestamp, value = res
            except StopIteration:
                raise
            except Exception as e:
                msg = "Could not call next() on user-given iterator.\n"
                msg += "   iterator: %s\n" % str(self.iterator)
                msg += "    of type: %s\n" % describe_type(self.iterator)
                msg += "because of this error:\n"
                msg += indent(("%s\n%s" % (e, traceback.format_exc())).strip(), "| ")
                raise ModelExecutionError(msg, self)

            if not isinstance(signal, (str, int)):
                msg = "Expected a string or number for the signal, got %s" % describe_value(signal)
                raise ValueError(msg)

            if not isinstance(timestamp, float):
                msg = "Expected a number for the timestamp, got %s" % describe_value(timestamp)
                raise ValueError(msg)

            self.next_signal = signal
            self.next_timestamp = timestamp
            self.next_value = value
            self.has_next = True
        except StopIteration:
            self.has_next = False
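
All of the snippets on this page build their error messages with describe_value() and describe_type(). Those helpers come from the surrounding code base (PyContracts exposes similarly named functions in contracts.interface); as a rough, hypothetical stand-in — not the library's real implementation — the following sketch is enough to exercise the examples on this page in isolation:

def describe_type(x):
    """A short description of the type of x (stand-in)."""
    return type(x).__name__


def describe_value(x, clip=80):
    """A short, clipped description of the value and type of x (stand-in)."""
    r = repr(x)
    if len(r) > clip:
        r = r[:clip - 3] + '...'
    return 'Instance of %s: %s' % (describe_type(x), r)


if __name__ == '__main__':
    print(describe_type({'a': 1}))        # dict
    print(describe_value([1, 2, 3]))      # Instance of list: [1, 2, 3]
    print(describe_value('x' * 500))      # repr clipped to 80 characters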
Example #2
def torgb(rgb):
    """ Converts all image formats to RGB uint8. """
    
    try:
        check_convertible_to_rgb(rgb)
    except ValueError as e:
        print(describe_value(rgb))
        raise BadInput(str(e), None, 0)
    
    if rgb.ndim == 2:
        if rgb.dtype == 'float32':
            rgb = (rgb * 255).astype('uint8')
        return gray2rgb(rgb)
    
    nc = rgb.shape[2]
    if nc == 4:
        return rgb[:, :, :3]

    if nc == 3:
        if rgb.dtype == 'float32':
            return (rgb * 255).astype('uint8')
        elif rgb.dtype == 'uint8':
            return rgb
        else:
            msg = 'Unexpected image format: %s' % describe_value(rgb)
            raise ValueError(msg)
Example #3
 def inverse(self, g):
     try:
         self.belongs(g)
         return np.linalg.inv(g)
     except:
         logger.error('Tried to invert %s' % describe_value(g))
         raise
Example #4
def formatm(*args, **kwargs):
    #name_len = 10
    assert len(args) > 0
    assert len(args) % 2 == 0
    cols = []
    for i in range(len(args) / 2):
        name = args[i * 2]
        matrix = args[i * 2 + 1]
        if not isinstance(name, str):
            raise ValueError('I expect a string for label, not %s.'
                             % describe_type(name))
#        varname = '  %s:' % rjust(name, name_len)
        varname = '  %s:' % name

        if  isinstance(matrix, np.ndarray):
#            raise ValueError('I expect a numpy array for value, not %s.' % 
#                             describe_type(matrix))
            value = format_matrix(matrix, **kwargs)
            if matrix.ndim > 1:
                varname = '\n' + varname
        else:
            value = describe_value(matrix)

        cols.append(varname)
        cols.append(value)

    cols = add_spacer(cols)
    return join_columns(cols)
Example #5
def cmd_list_states(data_central, argv):
    '''Shows a summary of the states present in DB. '''
    parser = OptionParser(prog='list-states',
                          usage=cmd_list_states.short_usage)
    parser.disable_interspersed_args()
    parser.add_option("-v", dest='verbose', default=False, action='store_true',
                      help="Show more verbose output.")
    (options, args) = parser.parse_args(argv)

    check_no_spurious(args)
    db = data_central.get_agent_state_db()

    combinations = list(db.list_states())
    if not combinations:
        logger.info('No learning states saved in DB.')
    else:
        logger.info('Found %d combinations in DB.' % len(combinations))

    for id_agent, id_robot in combinations:
        logger.info('- Found state a: %-35s  r: %-25s' % (id_agent, id_robot))

        if options.verbose:
            try:
                state = db.get_state(id_robot=id_robot, id_agent=id_agent)
                logger.info('  # episodes: %s' % len(state.id_episodes))
                logger.info('      object: %s' % 
                            describe_value(state.agent_state))
            except Exception as e:
                logger.error('  (could not load state: %s) ' % e)

    if not options.verbose:
        logger.debug('Use -v for more information.')
Example #6
def torgb(rgb):
    """ Converts all image formats to RGB uint8. """
    
    try:
        # check_convertible_to_rgb(rgb)
        pass
    except ValueError as e:
        raise BadInput(str(e), None, 0)
    
    if rgb.ndim == 2:
        if rgb.dtype == 'float32':
            m = np.amin(rgb)
            M = np.amax(rgb)
            if m < 0 or M > 1:
                msg = 'If float, I assume range [0,1]; found [%s, %s].' % (m, M)
                raise ValueError(msg)
            rgb = (rgb * 255).astype('uint8')
        return gray2rgb(rgb)
    
    nc = rgb.shape[2]
    if nc == 4:
        return rgb[:, :, :3]

    if nc == 3:
        if rgb.dtype == 'float32':
            return (rgb * 255).astype('uint8')
        elif rgb.dtype == 'uint8':
            return rgb
        else:
            msg = 'Unexpected image format: %s' % describe_value(rgb)
            raise ValueError(msg)
Example #7
def format_obs(d):
    """ Shows objects values and typed for the given dictionary """
    lines = []
    for name, value in d.items():
        lines.append('%15s: %s' % (name, describe_value(value)))
        lines.append('%15s  %s' % ('of type', describe_type(value)))
    return '\n'.join(lines)
Example #8
def formatm(*args, **kwargs):
    # name_len = 10
    assert len(args) > 0
    assert len(args) % 2 == 0
    cols = []
    for i in range(int(len(args) / 2)):
        name = args[i * 2]
        matrix = args[i * 2 + 1]
        if not isinstance(name, str):
            raise ValueError('I expect a string for label, not %s.'
                             % describe_type(name))
        #        varname = '  %s:' % rjust(name, name_len)
        varname = '  %s:' % name

        if isinstance(matrix, np.ndarray):
            #            raise ValueError('I expect a numpy array for value, not %s.' %
            #                             describe_type(matrix))
            value = format_matrix(matrix, **kwargs)
            if matrix.ndim > 1:
                varname = '\n' + varname
        else:
            value = describe_value(matrix)

        cols.append(varname)
        cols.append(value)

    cols = add_spacer(cols)
    return join_columns(cols)
Example #9
 def _load_next(self):
     try:
         signal, timestamp, value = self.iterator.next()
         if not isinstance(signal, (str, int)):
             msg = ('Expected a string or an integer for the signal, got %s' %
                    describe_value(signal))
             raise ValueError(msg)
         if not isinstance(timestamp, float):
             msg = ('Expected a number for the timestamp, got %s' % 
                    describe_value(timestamp))
             raise ValueError(msg)
         
         self.next_signal = signal
         self.next_timestamp = timestamp
         self.next_value = value
         self.has_next = True
     except StopIteration:
         self.has_next = False
Example #10
def same_computation(jobargs1, jobargs2):
    """ Returns boolean, string tuple """
    cmd1, args1, kwargs1 = jobargs1
    cmd2, args2, kwargs2 = jobargs2

    equal_command = cmd1 == cmd2
    equal_args = args1 == args2
    equal_kwargs = kwargs1 == kwargs2

    equal = equal_args and equal_kwargs and equal_command
    if not equal:
        reason = ""

        if not equal_command:
            reason += '* function changed \n'
            reason += '  - old: %s \n' % cmd1
            reason += '  - new: %s \n' % cmd2

            # TODO: can we check the actual code?

        warn = ' (or you did not implement proper __eq__)'
        if len(args1) != len(args2):
            reason += '* different number of arguments (%d -> %d)\n' % \
                      (len(args1), len(args2))
        else:
            for i, ob in enumerate(args1):
                if ob != args2[i]:
                    reason += '* arg #%d changed %s \n' % (i, warn)
                    reason += '  - old: %s\n' % describe_value(ob)
                    reason += '  - new: %s\n' % describe_value(args2[i])

        for key, value in kwargs1.items():
            if key not in kwargs2:
                reason += '* kwarg "%s" not found\n' % key
            elif value != kwargs2[key]:
                reason += '* argument "%s" changed %s \n' % (key, warn)
                reason += '  - old: %s \n' % describe_value(value)
                reason += '  - new: %s \n' % describe_value(kwargs2[key])

        # TODO: different lengths

        return False, reason
    else:
        return True, None
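
A hedged usage sketch for same_computation() above, assuming the function from the example and some describe_value() helper are in scope (for instance, everything pasted into one file with the stub below standing in for the real helper). The job tuples here are made up for illustration.

def describe_value(x):  # hypothetical stand-in for the real helper
    return '%r (%s)' % (x, type(x).__name__)


old_job = ('render_plot', (1, 2), {'dpi': 90})
new_job = ('render_plot', (1, 2), {'dpi': 300})

equal, reason = same_computation(old_job, new_job)
print(equal)   # False
print(reason)  # explains that kwarg "dpi" changed, with old and new values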
Example #11
def same_computation(jobargs1, jobargs2):
    """ Returns boolean, string tuple """
    cmd1, args1, kwargs1 = jobargs1
    cmd2, args2, kwargs2 = jobargs2

    equal_command = cmd1 == cmd2
    equal_args = args1 == args2
    equal_kwargs = kwargs1 == kwargs2

    equal = equal_args and equal_kwargs and equal_command
    if not equal:
        reason = ""

        if not equal_command:
            reason += '* function changed \n'
            reason += '  - old: %s \n' % cmd1
            reason += '  - new: %s \n' % cmd2

            # TODO: can we check the actual code?

        warn = ' (or you did not implement proper __eq__)'
        if len(args1) != len(args2):
            reason += '* different number of arguments (%d -> %d)\n' % \
                      (len(args1), len(args2))
        else:
            for i, ob in enumerate(args1):
                if ob != args2[i]:
                    reason += '* arg #%d changed %s \n' % (i, warn)
                    reason += '  - old: %s\n' % describe_value(ob)
                    reason += '  - new: %s\n' % describe_value(args2[i])

        for key, value in kwargs1.items():
            if key not in kwargs2:
                reason += '* kwarg "%s" not found\n' % key
            elif value != kwargs2[key]:
                reason += '* argument "%s" changed %s \n' % (key, warn)
                reason += '  - old: %s \n' % describe_value(value)
                reason += '  - new: %s \n' % describe_value(kwargs2[key])

        # TODO: different lengths

        return False, reason
    else:
        return True, None
Example #12
 def check_type(self, x):
     expected = self.ptype
     if self.ptype == float:
         expected = (float, int)
         
     if not isinstance(x, expected):
         msg = ("For param %r, expected %s, got %s.\n%s" % 
                 (self.name, self.ptype, describe_type(x),
                  describe_value(x)))
         raise DecentParamsSemanticError(self.params, self, msg)
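
The one twist in check_type() above is that an int is accepted wherever a float parameter is declared, but not the other way around. The same rule in a tiny self-contained sketch:

def accepts(ptype, x):
    # Mirror the check above: a declared float widens to (float, int).
    expected = (float, int) if ptype == float else ptype
    return isinstance(x, expected)


print(accepts(float, 3))     # True: ints pass where a float is expected
print(accepts(int, 3.0))     # False: floats do not pass where an int is expected
print(accepts(str, 'abc'))   # True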
Example #13
    def __setattr__(self, leaf, value):
        if leaf.startswith('_') or leaf in [
                'mount_points', 'children_already_provided'
        ]:
            return object.__setattr__(self, leaf, value)
        v = self.child(leaf)
        v.check_can_write()

        try:
            v._schema.validate(value)
        except NotValid as e:
            msg = 'Cannot set %s = %s:' % (leaf, describe_value(value))
            raise_wrapped(NotValid, e, msg, compact=True)

        from .memdata_events import event_leaf_set
        from .memdata_events import event_struct_set
        from .memdata_events import event_hash_set
        from .memdata_events import event_list_set

        if self._data[leaf] == value:
            pass
        else:
            # case of a simple data
            if is_simple_data(v._schema):
                event = event_leaf_set(name=self._prefix,
                                       leaf=leaf,
                                       value=value,
                                       **self._get_event_kwargs())
            # struct
            elif isinstance(v._schema, SchemaContext):
                name = self._prefix + (leaf, )
                event = event_struct_set(name=name,
                                         value=value,
                                         **self._get_event_kwargs())
            elif isinstance(v._schema, SchemaHash):
                name = self._prefix + (leaf, )
                event = event_hash_set(name=name,
                                       value=value,
                                       **self._get_event_kwargs())
            elif isinstance(v._schema, SchemaList):
                name = self._prefix + (leaf, )
                event = event_list_set(name=name,
                                       value=value,
                                       **self._get_event_kwargs())
            else:
                raise NotImplementedError(v._schema)
            self._notify(event)

            #             logger.debug('setting leaf %s = %s' % (leaf, value))
            #             logger.debug('setting leaf schema = %s ' % (v._schema))
            self._data[leaf] = value
Example #14
def to_yaml(manifold, value, representation=None):
    if representation is None:
        representation = get_default_representation(manifold)
    key = (manifold, representation)
    if not key in converters:
        raise ValueError("Unknown format %s; I know %s." % (key, converters.keys()))
    conv = converters[key]
    try:
        x = conv.to_yaml(value)
    except:
        msg = "Error while trying to convert %s" % describe_value(value)
        logger.error(msg)
        raise
    return ["%s:%s" % (manifold, representation), x]
Example #15
    def __str__(self):
        if self.block is not None:
            name = self.block.name
        else:
            name = "(unknown)"

        # TODO: add short bad_value
        s = "Bad input %r for block %r: %s" % (self.input_signal, name, self.error)

        if self.block is not None:
            s += "\n value: %s" % describe_value(self.bad_value)

        s += format_where(self.block)
        return s
Example #16
 def get_hint(self, s):
     check_isinstance(s, SchemaBase)
     if s in self.hints:
         return self.hints[s]
     if isinstance(s, (SchemaString, SchemaBytes, SchemaDate)):
         return HintFile()
     if isinstance(s, SchemaHash):
         return HintDir()
     if isinstance(s, SchemaContext):
         return HintDir()
     if isinstance(s, SchemaList):
         return HintDir()
     msg = 'Cannot find hint for %s' % describe_value(s)
     raise ValueError(msg)
Example #17
    def __str__(self):
        if self.block is not None:
            name = self.block.name
        else:
            name = '(unknown)'

        # TODO: add short bad_value
        s = ("Bad input %r for block %r: %s" % 
             (self.input_signal, name, self.error))
        
        if self.block is not None:
            s += '\n value: %s' % describe_value(self.bad_value)
        
        s += format_where(self.block)
        return s
Example #18
    def check_type(self, x):
        expected = self.ptype
        if self.ptype == float:
            expected = (float, int)

        if six.PY2:
            if self.ptype is unicode:
                if isinstance(x, six.string_types):
                    return
            
        if not isinstance(x, expected):
            msg = ("For param '%s', expected %s, got %s.\n%s" %
                    (self.name, self.ptype, describe_type(x),
                     describe_value(x)))
            raise DecentParamsSemanticError(self.params, self, msg)
Example #19
def to_yaml(manifold, value, representation=None):
    if representation is None:
        representation = get_default_representation(manifold)
    key = (manifold, representation)
    if not key in converters:
        raise ValueError('Unknown format %s; I know %s.' % 
                         (key, converters.keys()))
    conv = converters[key]
    try:
        x = conv.to_yaml(value)
    except:
        msg = 'Error while trying to convert %s' % describe_value(value)
        logger.error(msg)
        raise
    return ['%s:%s' % (manifold, representation), x]
Example #20
def from_yaml(x):
    if not isinstance(x, list):
        raise ValueError('I expect a list with two elements.')
    form = x[0]
    if not isinstance(form, str):
        raise ValueError('I expect a string describing the format,'
                         ' not %s, while decoding %s' % 
                         (describe_type(form), describe_value(x)))
    value = x[1]
    space, representation = form.split(':')

    key = (space, representation)
    if not key in converters:
        raise ValueError('Unknown format %s; I know %s.' % 
                         (key, converters.keys()))
    conv = converters[key]
    return conv.from_yaml(value)
Example #21
def from_yaml(x):
    if not isinstance(x, list):
        raise ValueError("I expect a list with two elements.")
    form = x[0]
    if not isinstance(form, str):
        raise ValueError(
            "I expect a string describing the format,"
            " not %s, while decoding %s" % (describe_type(form), describe_value(x))
        )
    value = x[1]
    space, representation = form.split(":")

    key = (space, representation)
    if not key in converters:
        raise ValueError("Unknown format %s; I know %s." % (key, converters.keys()))
    conv = converters[key]
    return conv.from_yaml(value)
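
Both to_yaml() (Examples #14/#19) and from_yaml() above rely on a module-level converters dict keyed by (space, representation), whose values expose to_yaml()/from_yaml() methods, plus the 'space:representation' string stored as the first list element. A minimal, hypothetical registry — the names below are invented, not the project's actual converters — makes the convention concrete:

import numpy as np


class MatrixConverter(object):  # hypothetical converter
    def to_yaml(self, value):
        return np.asarray(value).tolist()   # nested lists are YAML-friendly

    def from_yaml(self, value):
        return np.array(value)


converters = {('SE2', 'matrix'): MatrixConverter()}

g = np.eye(3)
y = ['SE2:matrix', converters[('SE2', 'matrix')].to_yaml(g)]   # shape of what to_yaml() returns
g2 = converters[('SE2', 'matrix')].from_yaml(y[1])             # what from_yaml() undoes
assert np.allclose(g, g2)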
Example #22
    def add(self, report, report_type, **kwargs):
        """
            Adds a report to the collection.
            
            :param report: Promise of a Report object
            :param report_type: A string that describes the "type" of the report
            :param kwargs:  str->str,int,float  parameters used for grouping
        """         
        if not isinstance(report_type, str):
            msg = 'Need a string for report_type, got %r.' % describe_value(report_type)
            raise ValueError(msg)
        
        from compmake import Promise
        if not isinstance(report, Promise):
            msg = ('ReportManager is meant to be given Promise objects, '
                   'which are the output of comp(). Obtained: %s' 
                   % describe_type(report))
            raise ValueError(msg)
        
        # check the format is ok
        self._check_report_format(report_type, **kwargs)
        
        key = frozendict2(report=report_type, **kwargs)
        
        if key in self.allreports:
            msg = 'Already added report for %s' % key
            msg += '\n its values is %s' % self.allreports[key]
            msg += '\n new value would be %s' % report
            raise ValueError(msg)

        self.allreports[key] = report

        report_type_sane = report_type.replace('_', '')
        
        key_no_report = dict(**key)
        del key_no_report['report']
        basename = self.html_resources_prefix + report_type_sane
        if key_no_report:
            basename += '-' + basename_from_key(key_no_report)
        
        dirname = os.path.join(self.outdir, report_type_sane)
        filename = os.path.join(dirname, basename) 
        self.allreports_filename[key] = filename + '.html'
Example #23
 def main(self, args=None, parent=None):
     """ Main entry point. Returns an integer as an error code. """ 
     
     if "short" in type(self).__dict__:
         msg = 'Class %s uses deprecated attribute "short".' % type(self)
         msg += ' Use "description" instead.'
         self.error(msg)
         
     # Create the parameters and set them using args
     self.parent = parent
     self.set_options_from_args(args)
     ret = self.go()
     if ret is None:
         ret = 0
     
     if isinstance(ret, int):
         return ret
     else:
         msg = 'Expected None or an integer from self.go(), got %s' % describe_value(ret)
         raise ValueError(msg)
Example #24
 def caller():
     try: 
         args = arguments(x)
     except Exception as e:
         msg = 'Error while preparing test case: %s.\n' % e
         msg += 'Error while calling %s with argument %r' % (arguments, x)
         logger.error(msg)
         raise  
      
     try:
         f(*args)
     except:
         msg = 'Error while executing test %r.\n' % name
         msg += ' f = %s\n' % f
         msg += ' f.__module__ = %s\n' % f.__module__
         msg += ' x = %s\n' % str(x)
         msg += ' arguments() = %s\n' % str(arguments)
         msg += ' arguments(x) has size %d\n' % len(args)
         for i, a in enumerate(args):
             msg += '  arg %d = %s\n' % (i, describe_value(a))
         logger.error(msg)
         raise
Example #25
def get_typsy_type(x):
    """ 
        Returns the type of the object x.
        
        There are multiple options that are checked in sequence:
        
        1) The object has a '__typsy_type__' attribute.
        
        2) The object has a callable attribute 'get_typsy_type()'.
        
        3) TODO: the object is a function and it has been decorated
           using contracts.
    """
    if hasattr(x, attrname):
        return getattr(x, attrname)
    elif hasattr(x, funcname):
        func = getattr(x, funcname)
        return func()  # it is already bound
    else:
        msg = 'Could not get typsy type for object.' 
        msg += '\n - object %r' % describe_value(x)
        raise TypsyException(msg) 
Example #26
def write_python_data(parent, name, mime, data):
    tables = get_tables()
    from tables.flavor import flavor_of

    hf = parent._v_file
    group = hf.createGroup(parent, name)
    hf.createArray(group, "mime", mime)
    try:
        flavor_of(data)
        ok_pytables = True
    except:
        ok_pytables = False

    # 2014-01-02 XXX this is a hack
    if data == []:
        ok_pytables = False

    if ok_pytables:
        try:
            hf.createArray(group, "value", data)
        except:
            msg = "Error while writing python data"
            msg += "\n parent: %s" % parent
            msg += "\n name: %s" % name
            msg += "\n mime: %s" % mime
            msg += "\n data: %s" % describe_type(data)
            msg += "\n       %s" % describe_value(data)
            msg += "\n flavor: %s" % flavor_of(data)
            msg += "\nraw:\n%s" % data.__repr__()
            logger.error(msg)
            raise
        serialized = "pytables"
    else:
        serialized = "pickle"
        s = StringIO()
        cPickle.dump(data, s, protocol=2)
        hf.createVLArray(group, "pickle", tables.VLStringAtom(), filters=None)
        group.pickle.append(s.getvalue())
    group._v_attrs["reprep_data_format"] = serialized
Example #27
    def main(self, args=None, parent=None):
        """ Main entry point. Returns an integer as an error code. """ 
        
        if "short" in type(self).__dict__:
            msg = 'Class %s uses deprecated attribute "short".' % type(self)
            msg += ' Use "description" instead.'
            self.error(msg)
            
        # Create the parameters and set them using args
        self.parent = parent
        self.set_options_from_args(args)

        profile = os.environ.get('QUICKAPP_PROFILE', False)

        if not profile:
            ret = self.go()
        else:

            import cProfile
            out = profile
            print('writing to %r' % out)
            ret = cProfile.runctx('self.go()', globals(), locals(), out)
            import pstats

            p = pstats.Stats(out)
            n = 30
            p.sort_stats('cumulative').print_stats(n)
            p.sort_stats('time').print_stats(n)


        if ret is None:
            ret = 0
        
        if isinstance(ret, int):
            return ret
        else:
            msg = 'Expected None or an integer from self.go(), got %s' % describe_value(ret)
            raise ValueError(msg)
Example #28
def write_python_data(parent, name, mime, data):
    tables = get_tables()
    from tables.flavor import flavor_of
    hf = parent._v_file
    group = hf.createGroup(parent, name)
    hf.createArray(group, 'mime', mime)
    try:
        flavor_of(data)
        ok_pytables = True
    except:
        ok_pytables = False
    
    # 2014-01-02 XXX this is a hack
    if data == []:
        ok_pytables = False
        
    if ok_pytables: 
        try:
            hf.createArray(group, 'value', data)
        except:
            msg = 'Error while writing python data'
            msg += '\n parent: %s' % parent
            msg += '\n name: %s' % name
            msg += '\n mime: %s' % mime
            msg += '\n data: %s' % describe_type(data)
            msg += '\n       %s' % describe_value(data)
            msg += '\n flavor: %s' % flavor_of(data)
            msg += '\nraw:\n%s' % data.__repr__()
            logger.error(msg)
            raise
        serialized = 'pytables'
    else:
        serialized = 'pickle'
        s = StringIO()
        cPickle.dump(data, s, protocol=2)
        hf.createVLArray(group, 'pickle', tables.VLStringAtom(), filters=None)
        group.pickle.append(s.getvalue())    
    group._v_attrs['reprep_data_format'] = serialized
Example #29
    def __init__(self, timestamps, values):
        values = list(values)
        timestamps = list(timestamps)
        if not timestamps:
            msg = 'Empty sequence.'
            raise ValueError(msg)

        if len(timestamps) != len(values):
            msg = 'Length mismatch'
            raise ValueError(msg)

        for t in timestamps:
            if not isinstance(t, (float, int)):
                msg = 'I expected a number, got %s' % describe_value(t)
                raise ValueError(msg)
        for i in range(len(timestamps) - 1):
            dt = timestamps[i + 1] - timestamps[i]
            if dt <= 0:
                msg = 'Invalid dt = %s at i = %s; ts= %s' % (dt, i, timestamps)
                raise ValueError(msg)
        timestamps = list(map(float, timestamps))
        self.timestamps = timestamps
        self.values = values
Example #30
    def add(self, context, report, report_type, **kwargs):
        """
            Adds a report to the collection.
            
            :param report: Promise of a Report object
            :param report_type: A string that describes the "type" of the report
            :param kwargs:  str->str,int,float  parameters used for grouping
        """
        from quickapp.compmake_context import CompmakeContext
        assert isinstance(context, CompmakeContext)
        if not isinstance(report_type, six.string_types):
            msg = 'Need a string for report_type, got %r.' % describe_value(report_type)
            raise ValueError(msg)

        if not isinstance(report, Promise):
            msg = ('ReportManager is meant to be given Promise objects, '
                   'which are the output of comp(). Obtained: %s'
                   % describe_type(report))
            raise ValueError(msg)

        # check the format is ok
        self._check_report_format(report_type, **kwargs)

        key = frozendict2(report=report_type, **kwargs)

        if key in self.allreports:
            msg = 'Already added report for %s' % key
            msg += '\n its values is %s' % self.allreports[key]
            msg += '\n new value would be %s' % report
            raise ValueError(msg)

        self.allreports[key] = report

        report_type_sane = report_type.replace('_', '')

        key_no_report = dict(**key)
        del key_no_report['report']
        basename = self.html_resources_prefix + report_type_sane
        if key_no_report:
            basename += '-' + basename_from_key(key_no_report)

        dirname = os.path.join(self.outdir, report_type_sane)
        filename = os.path.join(dirname, basename)
        self.allreports_filename[key] = filename + '.html'

        write_singles = False

        if write_singles:
            is_root = context.currently_executing == ['root']
            if not is_root:
                # Add also a single report independent of a global index

                # don't create the single report for the ones that are
                # defined in the root session

                filename_single = os.path.join(dirname, basename) + '_s.html'
                # filename_index_dyn = os.path.join(dirname, basename) + '_dyn.html'

                report_nid = self.html_resources_prefix + report_type_sane
                if key:
                    report_nid += '-' + basename_from_key(key)
                write_job_id = jobid_minus_prefix(context, report.job_id + '-writes')

                #                 write_report_yaml(report_nid, report_job_id=report.job_id,
                #                                   key=key, html_filename=filename_single,
                #                                   report_html_indexed=filename_index_dyn)

                context.comp(write_report_single,
                             report=report, report_nid=report_nid,
                             report_html=filename_single,
                             write_pickle=False,
                             static_dir=self.static_dir,
                             job_id=write_job_id)
Example #31
    def __init__(self,
                 nid: str,
                 data,
                 cols=None,
                 rows=None,
                 fmt=None,
                 caption=None):
        """
            :type data:  (array[R](fields[C]) | array[RxC] | list[R](list[C])
                 ), R>0, C>0
            :type cols:    None|list[C](str)
            :type rows:    None|list[R](str)
            :type caption: None|str
        """

        if fmt is None:
            fmt = "%s"
        self.fmt = fmt

        Node.__init__(self, nid)

        check_isinstance(data, (list, np.ndarray))

        if isinstance(data, list):
            # check minimum length
            if len(data) == 0:
                raise ValueError("Expected at least one row")
            # check that all of them are lists with same type
            for row in data:
                check_isinstance(row, list)
                if not len(row) == len(data[0]):
                    msg = "I want all rows to be the same length" " Got %s != %s." % (
                        len(row),
                        len(data[0]),
                    )
                    raise ValueError(msg)

            # create numpy array
            nrows = len(data)
            ncols = len(data[0])

            if ncols == 0:
                raise ValueError("At least one column expected")

            if cols is None:
                cols = [""] * ncols

            if rows is None:
                rows = [""] * nrows

        elif isinstance(data, np.ndarray):
            if not data.ndim in [1, 2]:
                msg = "Expected array of 1D or 2D shape, got %s." % describe_value(
                    data)
                raise ValueError(msg)

            if data.ndim == 1:
                # use fields name if desc not provided
                if cols is None:  # and data.dtype.fields is not None:
                    cols = list(data.dtype.fields)

                nrows = len(data)

                if rows is None:
                    rows = [""] * nrows

                lol = []
                for row in data:
                    lol.append(list(row))
                data = lol

            elif data.ndim == 2:
                if data.dtype.fields is not None:
                    msg = ("Cannot convert ndarray to table using "
                           "the heuristics that I know (received: %s). " %
                           describe_value(data))
                    raise ValueError(msg)

                nrows = data.shape[0]
                ncols = data.shape[1]

                if rows is None:
                    rows = [""] * nrows
                if cols is None:
                    cols = [""] * ncols

                data = data.tolist()

        else:
            assert False

        #
        #         check_multiple([ (cols, 'list[C](str|None),C>0'),
        #                          (rows, 'list[R](str|None),R>0'),
        #                          (data, 'list[R](list[C])'),
        #                          (caption, 'str|None') ])
        #         print('cols', cols)
        #         print('rows', rows)
        #         print('data', data)
        #         print('cols', cols)

        self.data = data
        self.cols = cols
        self.rows = rows
        self.caption = caption
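
The docstring above allows three layouts for data. A short, self-contained sketch of what each looks like (plain numpy/Python, independent of the table class itself):

import numpy as np

# 1) list[R](list[C]): equal-length rows as plain Python lists
data_lists = [[1, 2, 3],
              [4, 5, 6]]

# 2) array[R](fields[C]): a 1-D structured array; field names can stand in for cols
data_fields = np.zeros(2, dtype=[('x', float), ('y', float), ('z', float)])
cols_from_fields = list(data_fields.dtype.fields)   # field names, e.g. ['x', 'y', 'z']

# 3) array[RxC]: a plain 2-D array without named fields
data_2d = np.arange(6.0).reshape(2, 3)

print(len(data_lists), data_fields.shape, data_2d.shape)   # 2 (2,) (2, 3)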
Example #32
 def bail(msg):
     msg += '\n  stream: %s' % self
     msg += '\n   value: %s' % describe_value(x)
     raise BootInvalidValue(msg)
Example #33
def streamels_from_spec(shape, format, range, default):  # @ReservedAssignment
    streamels = np.zeros(shape=shape, dtype=streamel_dtype)
    kind = streamels['kind']
    lower = streamels['lower']
    upper = streamels['upper']

    # If the "format" is a string it is valid for all of them
    if isinstance(format, str):
        expect_one_of(format, ValueFormats.valid)
        kind.flat[:] = format
        # at this point the range must be unique
        check_valid_bounds(range)
        lower.flat[:] = range[0]
        upper.flat[:] = range[1]
    else:
        # If the format is not a string, then it must be a list
        if not isinstance(format, list):
            msg = ('Expected list for "format", got %s.'
                   % describe_value(format))
            raise ValueError(msg)

        # And it must have the same number of elements
        formats = np.array(format)
        if formats.shape != streamels.shape:
            msg = ('Expected format shape to be %s instead of %s.' % 
                   (streamels.shape, formats.shape))
            raise ValueError(msg)
        for i in xrange(formats.size):
            expect_one_of(formats.flat[i], ValueFormats.valid)
            kind.flat[i] = formats.flat[i]

        # also 'default' must 

        # Also the range must be a list
        if not isinstance(range, list):
            msg = 'Expected list for "range", got %s.' % describe_value(range)
            raise ValueError(msg)

        if len(streamels.shape) == 1:
            if len(range) != streamels.shape[0]:
                raise ValueError('Expected %s, got %s.' % 
                                 (streamels.shape[0], len(range)))

            for i in xrange(streamels.shape[0]):
                set_streamel_range(streamels[i], range[i])

        elif len(streamels.shape) == 2:
            if len(range) != streamels.shape[0]:
                raise ValueError('Expected %s, got %s.' % 
                                 (streamels.shape[0], len(range)))
            for i in xrange(streamels.shape[0]):
                if len(range[i]) != streamels.shape[1]:
                    raise ValueError('Expected %s, got %s.' % 
                                      (streamels.shape[1], len(range[i])))
                for j in xrange(streamels.shape[1]):
                    set_streamel_range(streamels[i, j], range[i][j])
        else:
            raise ValueError('Not implemented for shape %s.' % 
                             str(streamels.shape))

    # and also the default value must be a number
    if default is None:
        # Use half of the range
        half = (streamels['upper'] + streamels['lower']) / 2.0
        streamels['default'][:] = half  # XXX
        # round down, discretize if discrete
        isdiscrete = kind == ValueFormats.Discrete
        rounded = np.floor(half)
        streamels['default'][isdiscrete] = rounded[isdiscrete]
    elif isinstance(default, Number):
        # 
        streamels['default'].flat[:] = default
    elif isinstance(default, list):  # explicit list
        defaults = np.array(default, dtype=BOOT_OLYMPICS_SENSEL_RESOLUTION)
        if defaults.shape != streamels.shape:
            msg = ('Expected defaults shape to be %s instead of %s.' % 
                   (streamels.shape, defaults.shape))
            raise ValueError(msg)
        if not (np.issubdtype(defaults.dtype, int) or
                np.issubdtype(defaults.dtype, float)):
            msg = ('Expect an array of numbers, not %s.'
                   % describe_value(defaults))
            raise ValueError(msg)
        streamels['default'].flat[:] = defaults.flat
    else:
        msg = 'Could not interpret default value %s.' % describe_value(default)
        raise ValueError(msg)
    return streamels
Example #34
def comp_(context, command_, *args, **kwargs):
    """
        Main method to define a computation step.

        Extra arguments:

        :arg:job_id:   sets the job id (respects job_prefix)
        :arg:extra_dep: extra dependencies (not passed as arguments)
        :arg:command_name: used to define job name if job_id not provided.
        If not given, command_.__name__ is used.

        :arg:needs_context: if this is a dynamic job

        Raises UserError if command is not picklable.
    """

    db = context.get_compmake_db()

    command = command_

    if hasattr(command, '__module__') and command.__module__ == '__main__':
        if not command in WarningStorage.warned:
            if WarningStorage.warned:
                # already warned for another function
                msg = ('(Same warning for function %r.)' % command.__name__)
            else:
                msg = ("A warning about the function %r: " % command.__name__)
                msg += (
                    "This function is defined directly in the __main__ "
                    "module, "
                    "which means that it cannot be pickled correctly due to "
                    "a limitation of Python and 'make new_process=1' will "
                    "fail. "
                    "For best results, please define functions in external "
                    "modules. "
                    'For more info, read '
                    'http://stefaanlippens.net/pickleproblem '
                    'and the bug report http://bugs.python.org/issue5509.')
            warning(msg)
            WarningStorage.warned.add(command)

    if get_compmake_status() == CompmakeConstants.compmake_status_slave:
        return None

    # Check that this is a picklable function
    try:
        try_pickling(command)
    except Exception as e:
        msg = ('Cannot pickle function. Make sure it is not a lambda '
               'function or a nested function. (This is a limitation of '
               'Python)')
        raise_wrapped(UserError, e, msg, command=command)

    if CompmakeConstants.command_name_key in kwargs:
        command_desc = kwargs.pop(CompmakeConstants.command_name_key)
    elif hasattr(command, '__name__'):
        command_desc = command.__name__
    else:
        command_desc = type(command).__name__

    args = list(args)  # args arrives as a tuple; make it a mutable list

    # Get job id from arguments
    if CompmakeConstants.job_id_key in kwargs:
        # make sure that command does not have itself a job_id key
        try:
            argspec = inspect.getargspec(command)
        except TypeError:
            # Assume Cython function
            # XXX: write test
            pass
        else:
            if CompmakeConstants.job_id_key in argspec.args:
                msg = ("You cannot define the job id in this way because %r "
                       "is already a parameter of this function." %
                       CompmakeConstants.job_id_key)
                raise UserError(msg)

        job_id = kwargs[CompmakeConstants.job_id_key]
        check_isinstance(job_id, six.string_types)
        if ' ' in job_id:
            msg = 'Invalid job id: %r' % job_id
            raise UserError(msg)

        job_prefix = context.get_comp_prefix()
        if job_prefix:
            job_id = '%s-%s' % (job_prefix, job_id)

        del kwargs[CompmakeConstants.job_id_key]

        if context.was_job_defined_in_this_session(job_id):
            # unless it is dynamically generated
            if not job_exists(job_id, db=db):
                msg = 'The job %r was defined but not found in DB. I will let it slide.' % job_id
                print(msg)
            else:
                msg = 'The job %r was already defined in this session.' % job_id
                old_job = get_job(job_id, db=db)
                msg += '\n  old_job.defined_by: %s ' % old_job.defined_by
                msg += '\n context.currently_executing: %s ' % context.currently_executing
                msg += ' others defined in session: %s' % context.get_jobs_defined_in_this_session()
                print(msg)
#                 warnings.warn('I know something is more complicated here')
                #             if old_job.defined_by is not None and
                # old_job.defined_by == context.currently_executing:
                #                 # exception, it's ok
                #                 pass
                #             else:

                msg = 'Job %r already defined.' % job_id
                raise UserError(msg)
        else:
            if job_exists(job_id, db=db):
                # ok, you gave us a job_id, but we still need to check whether
                # it is the same job
                stack = context.currently_executing
                defined_by = get_job(job_id, db=db).defined_by
                if defined_by == stack:
                    # this is the same job-redefining
                    pass
                else:

                    for i in range(1000):  # XXX
                        n = '%s-%d' % (job_id, i)
                        if not job_exists(n, db=db):
                            job_id = n
                            break
                        
                    if False:
                        print(
                            'The job_id %r was given explicitly but already '
                            'defined.' % job_id)
                        print('current stack: %s' % stack)
                        print('    its stack: %s' % defined_by)
                        print('New job_id is %s' % job_id)

    else:
        job_id = generate_job_id(command_desc, context=context)

    context.add_job_defined_in_this_session(job_id)

    # could be done better
    if 'needs_context' in kwargs:
        needs_context = True
        del kwargs['needs_context']
    else:
        needs_context = False

    if CompmakeConstants.extra_dep_key in kwargs:
        extra_dep = kwargs[CompmakeConstants.extra_dep_key]
        del kwargs[CompmakeConstants.extra_dep_key]

        if not isinstance(extra_dep, (list, Promise)):
            msg = ('The "extra_dep" argument must be a list of promises; '
                   'got: %s' % describe_value(extra_dep))
            raise ValueError(msg)
        if isinstance(extra_dep, Promise):
            extra_dep = [extra_dep]
        assert isinstance(extra_dep, list)
        for ed in extra_dep:
            if not isinstance(ed, Promise):
                msg = ('The "extra_dep" argument must be a list of promises; '
                       'got: %s' % describe_value(extra_dep))
                raise ValueError(msg)
        extra_dep = collect_dependencies(extra_dep)

    else:
        extra_dep = set()

    children = collect_dependencies([args, kwargs])
    children.update(extra_dep)

    for c in children:
        if not job_exists(c, db):
            msg = "Job %r references a job %r that doesnt exist." % (job_id, c)
            raise ValueError(msg)

    all_args = (command, args, kwargs)

    assert len(context.currently_executing) >= 1
    assert context.currently_executing[0] == 'root'
    
    c = Job(job_id=job_id,
            children=children,
            command_desc=command_desc,
            needs_context=needs_context,
            defined_by=context.currently_executing)
    
    # Need to inherit the pickle
    if context.currently_executing[-1] != 'root':
        parent_job = get_job(context.currently_executing[-1], db)
        c.pickle_main_context = parent_job.pickle_main_context

    if job_exists(job_id, db):
        old_job = get_job(job_id, db)

        if old_job.defined_by != c.defined_by:
            warning('Redefinition of %s: ' % job_id)
            warning(' cur defined_by: %s' % c.defined_by)
            warning(' old defined_by: %s' % old_job.defined_by)

        if old_job.children != c.children:
            #warning('Redefinition problem:')
            #warning(' old children: %s' % (old_job.children))
            #warning(' old dyn children: %s' % old_job.dynamic_children)
            #warning(' new children: %s' % (c.children))

            # fixing this
            for x, deps in old_job.dynamic_children.items():
                if not x in c.children:
                    # not a child any more
                    # FIXME: ok but note it might be a dependence of a child
                    # continue
                    pass
                c.dynamic_children[x] = deps
                for j in deps:
                    if not j in c.children:
                        c.children.add(j)

        if old_job.parents != c.parents:
            # warning('Redefinition of %s: ' % job_id)
            #  warning(' cur parents: %s' % (c.parents))
            # warning(' old parents: %s' % old_job.parents)
            for p in old_job.parents:
                c.parents.add(p)

                # TODO: preserve defines
                #     from compmake.ui.visualization import info
                #     info('defining job %r with children %r' % (job_id,
                # c.children))

                #     if True or c.defined_by == ['root']:

    for child in children:
        db_job_add_parent_relation(child=child, parent=job_id, db=db)

    if get_compmake_config('check_params') and job_exists(job_id, db):
        # OK, this is going to be black magic.
        # We want to load the previous job definition,
        # however, by unpickling(), it will start
        # __import__()ing the modules, perhaps
        # even the one that is calling us.
        # What happens, then, is that it will try to
        # add this computation again, recursively.
        # What we do is temporarily switch to
        # slave mode, so that recursive calls to comp()
        # are disabled.
        #             old_status = get_compmake_status()
        #             set_compmake_status(
        # CompmakeConstants.compmake_status_slave)
        all_args_old = get_job_args(job_id, db=db)
        #             set_compmake_status(old_status)
        same, reason = same_computation(all_args, all_args_old)

        if not same:
            #print('different job, cleaning cache:\n%s  ' % reason)
            from compmake.jobs.actions import clean_targets
            clean_targets([job_id], db)
#             if job_cache_exists(job_id, db):
#                 delete_job_cache(job_id, db)
            publish(context, 'job-redefined', job_id=job_id, reason=reason)
        else:
            # print('ok, same job')
            pass
            # XXX TODO clean the cache
            #             else:
            #                 publish(context, 'job-already-defined',
            # job_id=job_id)

    set_job_args(job_id, all_args, db=db)
    set_job(job_id, c, db=db)
    publish(context, 'job-defined', job_id=job_id)

    return Promise(job_id)
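
One detail of comp_() worth isolating: when a caller-supplied job_id already exists but was defined by a different stack, the code appends -0, -1, ... until a free id is found. The same idea as a self-contained sketch (the existing set here is a stand-in for the job_exists() DB query; unlike the original, this sketch raises if it runs out of attempts):

def uniquify_job_id(job_id, existing, max_tries=1000):
    """Return job_id, or job_id-<i> for the first i that is not taken."""
    if job_id not in existing:
        return job_id
    for i in range(max_tries):
        candidate = '%s-%d' % (job_id, i)
        if candidate not in existing:
            return candidate
    raise ValueError('Could not find a free id for %r' % job_id)


existing = {'plot', 'plot-0'}
print(uniquify_job_id('plot', existing))    # plot-1
print(uniquify_job_id('stats', existing))   # stats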
Example #35
 def init(self):
     self.iterator = self.init_iterator()
     if self.iterator is None:
         msg = 'init_iterator() must return an iterator, got %s' % describe_value(self.iterator)
         raise ValueError(msg)
     self._load_next()
Example #36
def comp_(context, command_, *args, **kwargs):
    """
        Main method to define a computation step.

        Extra arguments:

        :arg:job_id:   sets the job id (respects job_prefix)
        :arg:extra_dep: extra dependencies (not passed as arguments)
        :arg:command_name: used to define job name if job_id not provided.
        If not given, command_.__name__ is used.

        :arg:needs_context: if this is a dynamic job

        Raises UserError if command is not picklable.
    """

    db = context.get_compmake_db()

    command = command_

    if hasattr(command, '__module__') and command.__module__ == '__main__':
        if not command in WarningStorage.warned:
            if WarningStorage.warned:
                # already warned for another function
                msg = ('(Same warning for function %r.)' % command.__name__)
            else:
                msg = ("A warning about the function %r: " % command.__name__)
                msg += (
                    "This function is defined directly in the __main__ "
                    "module, "
                    "which means that it cannot be pickled correctly due to "
                    "a limitation of Python and 'make new_process=1' will "
                    "fail. "
                    "For best results, please define functions in external "
                    "modules. "
                    'For more info, read '
                    'http://stefaanlippens.net/pickleproblem '
                    'and the bug report http://bugs.python.org/issue5509.')
            warning(msg)
            WarningStorage.warned.add(command)

    if get_compmake_status() == CompmakeConstants.compmake_status_slave:
        return None

    # Check that this is a picklable function
    try:
        try_pickling(command)
    except Exception as e:
        msg = ('Cannot pickle function. Make sure it is not a lambda '
               'function or a nested function. (This is a limitation of '
               'Python)')
        raise_wrapped(UserError, e, msg, command=command)

    if CompmakeConstants.command_name_key in kwargs:
        command_desc = kwargs.pop(CompmakeConstants.command_name_key)
    elif hasattr(command, '__name__'):
        command_desc = command.__name__
    else:
        command_desc = type(command).__name__

    args = list(args)  # args arrives as a tuple; make it a mutable list

    # Get job id from arguments
    if CompmakeConstants.job_id_key in kwargs:
        # make sure that command does not have itself a job_id key
        try:
            argspec = inspect.getargspec(command)
        except TypeError:
            # Assume Cython function
            # XXX: write test
            pass
        else:
            if CompmakeConstants.job_id_key in argspec.args:
                msg = ("You cannot define the job id in this way because %r "
                       "is already a parameter of this function." %
                       CompmakeConstants.job_id_key)
                raise UserError(msg)

        job_id = kwargs[CompmakeConstants.job_id_key]
        check_isinstance(job_id, six.string_types)
        if ' ' in job_id:
            msg = 'Invalid job id: %r' % job_id
            raise UserError(msg)

        job_prefix = context.get_comp_prefix()
        if job_prefix:
            job_id = '%s-%s' % (job_prefix, job_id)

        del kwargs[CompmakeConstants.job_id_key]

        if context.was_job_defined_in_this_session(job_id):
            # unless it is dynamically generated
            if not job_exists(job_id, db=db):
                msg = 'The job %r was defined but not found in DB. I will let it slide.' % job_id
                print(msg)
            else:
                msg = 'The job %r was already defined in this session.' % job_id
                old_job = get_job(job_id, db=db)
                msg += '\n  old_job.defined_by: %s ' % old_job.defined_by
                msg += '\n context.currently_executing: %s ' % context.currently_executing
                msg += ' others defined in session: %s' % context.get_jobs_defined_in_this_session()
                print(msg)
                #                 warnings.warn('I know something is more complicated here')
                #             if old_job.defined_by is not None and
                # old_job.defined_by == context.currently_executing:
                #                 # exception, it's ok
                #                 pass
                #             else:

                msg = 'Job %r already defined.' % job_id
                raise UserError(msg)
        else:
            if job_exists(job_id, db=db):
                # ok, you gave us a job_id, but we still need to check whether
                # it is the same job
                stack = context.currently_executing
                defined_by = get_job(job_id, db=db).defined_by
                if defined_by == stack:
                    # this is the same job-redefining
                    pass
                else:

                    for i in range(1000):  # XXX
                        n = '%s-%d' % (job_id, i)
                        if not job_exists(n, db=db):
                            job_id = n
                            break

                    if False:
                        print('The job_id %r was given explicitly but already '
                              'defined.' % job_id)
                        print('current stack: %s' % stack)
                        print('    its stack: %s' % defined_by)
                        print('New job_id is %s' % job_id)

    else:
        job_id = generate_job_id(command_desc, context=context)

    context.add_job_defined_in_this_session(job_id)

    # could be done better
    if 'needs_context' in kwargs:
        needs_context = True
        del kwargs['needs_context']
    else:
        needs_context = False

    if CompmakeConstants.extra_dep_key in kwargs:
        extra_dep = kwargs[CompmakeConstants.extra_dep_key]
        del kwargs[CompmakeConstants.extra_dep_key]

        if not isinstance(extra_dep, (list, Promise)):
            msg = ('The "extra_dep" argument must be a list of promises; '
                   'got: %s' % describe_value(extra_dep))
            raise ValueError(msg)
        if isinstance(extra_dep, Promise):
            extra_dep = [extra_dep]
        assert isinstance(extra_dep, list)
        for ed in extra_dep:
            if not isinstance(ed, Promise):
                msg = ('The "extra_dep" argument must be a list of promises; '
                       'got: %s' % describe_value(extra_dep))
                raise ValueError(msg)
        extra_dep = collect_dependencies(extra_dep)

    else:
        extra_dep = set()

    children = collect_dependencies([args, kwargs])
    children.update(extra_dep)

    for c in children:
        if not job_exists(c, db):
            msg = "Job %r references a job %r that doesnt exist." % (job_id, c)
            raise ValueError(msg)

    all_args = (command, args, kwargs)

    assert len(context.currently_executing) >= 1
    assert context.currently_executing[0] == 'root'

    c = Job(job_id=job_id,
            children=children,
            command_desc=command_desc,
            needs_context=needs_context,
            defined_by=context.currently_executing)

    # Need to inherit the pickle
    if context.currently_executing[-1] != 'root':
        parent_job = get_job(context.currently_executing[-1], db)
        c.pickle_main_context = parent_job.pickle_main_context

    if job_exists(job_id, db):
        old_job = get_job(job_id, db)

        if old_job.defined_by != c.defined_by:
            warning('Redefinition of %s: ' % job_id)
            warning(' cur defined_by: %s' % c.defined_by)
            warning(' old defined_by: %s' % old_job.defined_by)

        if old_job.children != c.children:
            #warning('Redefinition problem:')
            #warning(' old children: %s' % (old_job.children))
            #warning(' old dyn children: %s' % old_job.dynamic_children)
            #warning(' new children: %s' % (c.children))

            # fixing this
            for x, deps in old_job.dynamic_children.items():
                if not x in c.children:
                    # not a child any more
                    # FIXME: ok but note it might be a dependence of a child
                    # continue
                    pass
                c.dynamic_children[x] = deps
                for j in deps:
                    if not j in c.children:
                        c.children.add(j)

        if old_job.parents != c.parents:
            # warning('Redefinition of %s: ' % job_id)
            #  warning(' cur parents: %s' % (c.parents))
            # warning(' old parents: %s' % old_job.parents)
            for p in old_job.parents:
                c.parents.add(p)

                # TODO: preserve defines
                #     from compmake.ui.visualization import info
                #     info('defining job %r with children %r' % (job_id,
                # c.children))

                #     if True or c.defined_by == ['root']:

    for child in children:
        db_job_add_parent_relation(child=child, parent=job_id, db=db)

    if get_compmake_config('check_params') and job_exists(job_id, db):
        # OK, this is going to be black magic.
        # We want to load the previous job definition,
        # however, by unpickling(), it will start
        # __import__()ing the modules, perhaps
        # even the one that is calling us.
        # What happens, then, is that it will try to
        # add this computation again, recursively.
        # What we do is temporarily switch to
        # slave mode, so that recursive calls to comp()
        # are disabled.
        #             old_status = get_compmake_status()
        #             set_compmake_status(
        # CompmakeConstants.compmake_status_slave)
        all_args_old = get_job_args(job_id, db=db)
        #             set_compmake_status(old_status)
        same, reason = same_computation(all_args, all_args_old)

        if not same:
            #print('different job, cleaning cache:\n%s  ' % reason)
            from compmake.jobs.actions import clean_targets
            clean_targets([job_id], db)
            #             if job_cache_exists(job_id, db):
            #                 delete_job_cache(job_id, db)
            publish(context, 'job-redefined', job_id=job_id, reason=reason)
        else:
            # print('ok, same job')
            pass
            # XXX TODO clean the cache
            #             else:
            #                 publish(context, 'job-already-defined',
            # job_id=job_id)

    set_job_args(job_id, all_args, db=db)
    set_job(job_id, c, db=db)
    publish(context, 'job-defined', job_id=job_id)

    return Promise(job_id)
Example #37
    def __init__(self, nid, data, cols=None, rows=None, fmt=None, caption=None):
        """ 
            :type data:  (array[R](fields[C]) | array[RxC] | list[R](list[C])
                 ), R>0, C>0
            :type cols:    None|list[C](str)
            :type rows:    None|list[R](str)
            :type caption: None|str 
        """

        if fmt is None:
            fmt = "%s"
        self.fmt = fmt

        Node.__init__(self, nid)

        check_isinstance(data, (list, np.ndarray))

        if isinstance(data, list):
            # check minimum length
            if len(data) == 0:
                raise ValueError("Expected at least one row")
            # check that all of them are lists with same type
            for row in data:
                check_isinstance(row, list)
                if not len(row) == len(data[0]):
                    msg = "I want all rows to be the same length" " Got %s != %s." % (len(row), len(data[0]))
                    raise ValueError(msg)

            # create numpy array
            nrows = len(data)
            ncols = len(data[0])

            if ncols == 0:
                raise ValueError("At least one column expected")

            if cols is None:
                cols = [""] * ncols

            if rows is None:
                rows = [""] * nrows

        elif isinstance(data, np.ndarray):
            if not data.ndim in [1, 2]:
                msg = "Expected array of 1D or 2D shape, got %s." % describe_value(data)
                raise ValueError(msg)

            if data.ndim == 1:
                # use fields name if desc not provided
                if cols is None:  # and data.dtype.fields is not None:
                    cols = list(data.dtype.fields)

                nrows = len(data)

                if rows is None:
                    rows = [""] * nrows

                lol = []
                for row in data:
                    lol.append(list(row))
                data = lol

            elif data.ndim == 2:
                if data.dtype.fields is not None:
                    msg = (
                        "Cannot convert ndarray to table using "
                        "the heuristics that I know (received: %s). " % describe_value(data)
                    )
                    raise ValueError(msg)

                nrows = data.shape[0]
                ncols = data.shape[1]

                if rows is None:
                    rows = [""] * nrows
                if cols is None:
                    cols = [""] * ncols

                data = data.tolist()

        else:
            assert False

        #
        #         check_multiple([ (cols, 'list[C](str|None),C>0'),
        #                          (rows, 'list[R](str|None),R>0'),
        #                          (data, 'list[R](list[C])'),
        #                          (caption, 'str|None') ])
        #         print('cols', cols)
        #         print('rows', rows)
        #         print('data', data)
        #         print('cols', cols)

        self.data = data
        self.cols = cols
        self.rows = rows
        self.caption = caption
Example #38
def check_type(entry, etype, obtained):
    if not isinstance(obtained, etype):
        msg = 'Error in instantiating code spec:\n\t%s' % str(entry['code']).strip()
        msg += '\nI expected: %s\nbut I got %s' % (etype, describe_value(obtained))
        from conf_tools.exceptions import ConfToolsException
        raise ConfToolsException(msg)