Example #1
 def __add__(self, other):  # => a+b
     a = _copy(self.coefficients)
     b = _copy(other.coefficients)
     s = BinaryExtensionModuloConstructor(a ^ b)
     self.xors = other.xors + self.modulodegree-1
     s.xors = self.xors
     return s
Example #2
 def _solve(solver, x0, disp=False, callback=None):
     from copy import deepcopy as _copy
     from mystic.tools import isNull
     if x0 is not None:
         solver.SetInitialPoints(x0)
         if solver._useStrictRange:  #XXX: always, settable, or sync'd ?
             solver.SetStrictRanges(solver._strictMin,
                                    solver._strictMax)
     _term = (solver._live is False) and solver.Terminated()
     if _term is True:
         solver._live = True  #XXX: HACK don't reset _fcalls
     if solver._cost[1] is None:  #XXX: HACK for configured NestedSolver
         solver.SetObjective(cost, ExtraArgs=ExtraArgs)
     solver.Solve(disp=disp, callback=callback)
     if _term is True: solver._live = False
     sm = solver._stepmon
     em = solver._evalmon
     if isNull(sm): sm = ([], [], [], [])
     else:
         sm = _copy(sm)
         sm = (sm._x, sm._y, sm._id, sm._info)
     if isNull(em): em = ([], [], [], [])
     else:
         em = _copy(em)
         em = (em._x, em._y, em._id, em._info)
     return solver, sm, em
Example #3
 def __init__(self, source, **traits):
     HasTraits.__init__(self, **traits)
     nTs = self.time.shape[0]
     if isinstance(source, HasTraits):
         self.traited = True
         self.tracking = source.traits(track=True).keys()
         for attr in self.tracking:
             shape = (nTs,)
             if type(getattr(source, attr)) is _numpy.ndarray:
                 shape += getattr(source, attr).shape
             self.data[attr] = _numpy.zeros(shape, "d")
     else:
         if type(source) is type([]):
             self.tracking = source
             for var in source:
                 self.data[var] = _numpy.zeros((nTs,), "d")
             self._update = _copy(source)
         elif type(source) is type({}):
             self.tracking = source.keys()
             for var in self.tracking:
                 shape = (nTs,) + tuple(source[var])
                 self.data[var] = _numpy.zeros(shape, "d")
             self._update = _copy(self.tracking)
         else:
             raise TypeError(self.__class__.__doc__)
     self.source = source
Example #4
 def copy(self):
     temp = _copy(self)
     temp.data = {}
     temp.data.update(self.data)
     temp.break1 = _copy(self.break1)
     temp.break2 = _copy(self.break2)
     return temp
Example #5
 def _step(solver, x0, disp=False, callback=None):
     from copy import deepcopy as _copy
     from mystic.tools import isNull
     #ns = len(solver._stepmon)
     #ne = len(solver._evalmon)
     if x0 is not None:
         solver.SetInitialPoints(x0)
         if solver._useStrictRange:  #XXX: always, settable, or sync'd ?
             solver.SetStrictRanges(solver._strictMin,
                                    solver._strictMax)
     _term = (solver._live is False) and solver.Terminated()
     if _term is True:
         solver._live = True  #XXX: HACK don't reset _fcalls
     solver.Step(cost,
                 ExtraArgs=ExtraArgs,
                 disp=disp,
                 callback=callback)
     if _term is True: solver._live = False
     sm = solver._stepmon
     em = solver._evalmon
     if isNull(sm): sm = ([], [], [], [])
     else:
         sm = _copy(sm)  #[ns:]
         sm = (sm._x, sm._y, sm._id, sm._info)
     if isNull(em): em = ([], [], [], [])
     else:
         em = _copy(em)  #[ne:]
         em = (em._x, em._y, em._id, em._info)
     return solver, sm, em
Example #6
 def __sub__(self, other):  # => a-b
     # in GF(2^n) every element is its own additive inverse, so -other == other and a - b == a + b
     a = _copy(self.coefficients)
     b = _copy(other.coefficients)
     s = BinaryExtensionModuloConstructor(a ^ b)
     self.xors = other.xors + self.modulodegree-1
     s.xors = self.xors
     return s
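A note on Examples #1 and #6: assuming the coefficients are stored as integer bit masks (which the a ^ b suggests), addition and subtraction over GF(2^n) are both coefficient-wise XOR, which is why the two methods share the same body. A minimal, self-contained illustration:

# Sketch only: polynomials over GF(2) encoded as integer bit masks.
a = 0b1011  # x^3 + x + 1
b = 0b0110  # x^2 + x
# Every element is its own additive inverse, so a + b == a - b.
print(bin(a ^ b))  # 0b1101 -> x^3 + x^2 + 1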
Example #7
def render(model,
           resolution,
           cam,
           steps,
           center=(0, 0),
           segmented=False,
           use_light=False,
           path_to_mesh=None):  # pylint: disable=too-many-arguments
    """Render a sequence of views from a fitted body model."""
    assert steps >= 1
    if segmented:
        texture = _os.path.join(_os.path.dirname(__file__), '..', 'models',
                                '3D', 'mask_filled.png')
    else:
        texture = _os.path.join(_os.path.dirname(__file__), '..', 'models',
                                '3D', 'mask_filled_uniform.png')
    if path_to_mesh is None:
        mesh = _copy(_TEMPLATE_MESH)
    else:
        mesh = _copy(_Mesh(path_to_mesh))

    # render ply
    model.betas[:len(cam['betas'])] = cam['betas']
    model.pose[:] = cam['pose']
    model.trans[:] = cam['trans']

    mesh.v = model.r
    w, h = resolution[0], resolution[1]
    dist = _np.abs(cam['t'][2] - _np.mean(mesh.v, axis=0)[2])
    rn = _create_renderer(
        w=w,
        h=h,
        near=1.,
        far=20. + dist,
        rt=_np.array(cam['rt']),
        t=_np.array(cam['t']),
        f=_np.array([cam['f'], cam['f']]),
        # c=_np.array(cam['cam_c']),
        texture=texture)
    light_yrot = _np.radians(120)
    baked_mesh = bake_vertex_colors(mesh)
    base_mesh = _copy(baked_mesh)
    mesh.f = base_mesh.f
    mesh.vc = base_mesh.vc
    renderings = []
    for angle in _np.linspace(0., 2. * (1. - 1. / steps) * _np.pi, steps):
        mesh.v = _rotateY(base_mesh.v, angle)
        imtmp = _simple_renderer(rn=rn,
                                 meshes=[mesh],
                                 yrot=light_yrot,
                                 texture=texture,
                                 use_light=use_light)
        im = _np.zeros(h * w * 3).reshape(((h, w, 3)))
        im[:h, :w, :] = imtmp * 255.
        renderings.append(im)
    return renderings
Example #8
def _simple_renderer(rn, meshes, yrot=0, texture=None, use_light=False):
    mesh = meshes[0]
    if texture is not None:
        if not hasattr(mesh, 'ft'):
            mesh.ft = _copy(mesh.f)
            vt = _copy(mesh.v[:, :2])
            vt -= _np.min(vt, axis=0).reshape((1, -1))
            vt /= _np.max(vt, axis=0).reshape((1, -1))
            mesh.vt = vt
        mesh.texture_filepath = rn.texture_image

    # Set camera parameters
    if texture is not None:
        rn.set(v=mesh.v,
               f=mesh.f,
               vc=mesh.vc,
               ft=mesh.ft,
               vt=mesh.vt,
               bgcolor=_np.ones(3))
    else:
        rn.set(v=mesh.v, f=mesh.f, vc=mesh.vc, bgcolor=_np.ones(3))

    for next_mesh in meshes[1:]:
        _stack_with(rn, next_mesh, texture)

    # Construct light.
    if use_light:
        albedo = rn.vc
        rn.vc = _odr_l.LambertianPointLight(f=rn.f,
                                            v=rn.v,
                                            num_verts=len(rn.v),
                                            light_pos=_rotateY(
                                                _np.array([-200, -100, -100]),
                                                yrot),
                                            vc=albedo,
                                            light_color=_np.array([1, 1, 1]))
        # Construct Left Light
        rn.vc += _odr_l.LambertianPointLight(f=rn.f,
                                             v=rn.v,
                                             num_verts=len(rn.v),
                                             light_pos=_rotateY(
                                                 _np.array([800, 10, 300]),
                                                 yrot),
                                             vc=albedo,
                                             light_color=_np.array([1, 1, 1]))

        # Construct Right Light
        rn.vc += _odr_l.LambertianPointLight(
            f=rn.f,
            v=rn.v,
            num_verts=len(rn.v),
            light_pos=_rotateY(_np.array([-500, 500, 1000]), yrot),
            vc=albedo,
            light_color=_np.array([.7, .7, .7]))
    return rn.r
Example #9
def simple_renderer(rn, meshes, yrot=0):
    """Create a renderer, optionally with texture."""
    mesh = meshes[0]
    if hasattr(rn, 'texture_image'):
        if not hasattr(mesh, 'ft'):
            mesh.ft = _copy(mesh.f)
            vt = _copy(mesh.v[:, :2])
            vt -= _np.min(vt, axis=0).reshape((1, -1))
            vt /= _np.max(vt, axis=0).reshape((1, -1))
            mesh.vt = vt
        mesh.texture_filepath = rn.texture_image
        rn.set(v=mesh.v,
               f=mesh.f,
               vc=mesh.vc,
               ft=mesh.ft,
               vt=mesh.vt,
               bgcolor=_np.ones(3))
    else:
        rn.set(v=mesh.v, f=mesh.f, vc=mesh.vc, bgcolor=_np.ones(3))

    for next_mesh in meshes[1:]:
        _stack_with(rn, next_mesh)  # pylint: disable=undefined-variable

    albedo = rn.vc

    # Construct Back Light (on back right corner)
    rn.vc = _odr_l.LambertianPointLight(f=rn.f,
                                        v=rn.v,
                                        num_verts=len(rn.v),
                                        light_pos=rotateY(
                                            _np.array([-200, -100, -100]),
                                            yrot),
                                        vc=albedo,
                                        light_color=_np.array([1, 1, 1]))

    # Construct Left Light
    rn.vc += _odr_l.LambertianPointLight(f=rn.f,
                                         v=rn.v,
                                         num_verts=len(rn.v),
                                         light_pos=rotateY(
                                             _np.array([800, 10, 300]), yrot),
                                         vc=albedo,
                                         light_color=_np.array([1, 1, 1]))

    # Construct Right Light
    rn.vc += _odr_l.LambertianPointLight(f=rn.f,
                                         v=rn.v,
                                         num_verts=len(rn.v),
                                         light_pos=rotateY(
                                             _np.array([-500, 500, 1000]),
                                             yrot),
                                         vc=albedo,
                                         light_color=_np.array([.7, .7, .7]))
    return rn.r
Example #10
    def _constructor(cls,
                     items=None,
                     copy_items=False,
                     deepcopy_items=False,
                     items_override=None,
                     instance_attr=None,
                     copy_instance_attr=False,
                     deepcopy_instance_attr=False,
                     instance_attr_override=None,
                     memo=None):

        obj = cls.__new__(cls)

        if instance_attr is not None:
            if deepcopy_instance_attr:
                instance_attr = _deepcopy(instance_attr, memo=memo)
            elif copy_instance_attr:
                instance_attr = {
                    name: _copy(item)
                    for name, item in instance_attr.items()
                }

        if instance_attr_override is not None:
            instance_attr = instance_attr or {}
            instance_attr.update(instance_attr_override)

        if instance_attr:
            sentinel = object()
            for name in obj._all_slots:
                item = instance_attr.pop(name, sentinel)
                if item is not sentinel:
                    setattr(obj, name, item)

            if hasattr(obj, '__dict__'):
                obj.__dict__.update(instance_attr)

        if items is not None:
            if items.__class__ is not cls._base_dict:
                items = cls._base_dict(items)
            if deepcopy_items:
                items = _deepcopy(items, memo=memo)
            elif copy_items:
                if cls._base_dict is dict:
                    items = {name: _copy(item) for name, item in items.items()}
                else:
                    items = [(name, _copy(item))
                             for name, item in items.items()]
            obj._base_dict_update(items)

        if items_override is not None:
            obj._base_dict_update(items_override)

        return obj
Example #11
    def copy(self, recurse=False):
        """
        Return a copy of the node, but not copies of children, parent,
        or any attribute that is a Node.

        If `recurse` is True, recursively copy child nodes.

        Args:
            recurse (bool): Whether or not to copy children as well as self.

        Returns:
            Node: A copy of self.

        TODO: test this function.

        RR: This function runs rather slowly -CZ
        """
        newnode = Node()
        for attr, value in self.__dict__.items():
            if (attr not in ("children", "parent")
                    and not isinstance(value, Node)):
                setattr(newnode, attr, _copy(value))
            if recurse:
                newnode.children = [
                    child.copy(True) for child in self.children
                ]
        return newnode
Example #12
    def set(self, section, option, value=None):
        """
        Sets the option value in the provided section
        :param section: The section the config option is in, e.g. "main" or "s3"
        :type section: str
        :param option: The name of the option
        :type option: str
        :param value: The value to be set as the config option
        :type value: Iterable (non-dict) or autoboxable to str
        :raises MultipleInvalid: If the desired section and option are not valid in the config Schema
        """
        _ALLOWED_SCHEMA_OPTIONS = {section.schema: [option.schema for option in self._CONFIG_SCHEMA.schema[section]]
                                   for section in self._CONFIG_SCHEMA.schema}

        # If in bootstrap mode, create the options in advance of the check to see if they're in the schema. This will
        # allow us to set invalid options, but they won't validate on write.
        if self._bootstrap and section not in self._configuration:
            self._configuration[section] = dict()
        if self._bootstrap and option not in self._configuration[section]:
            self._configuration[section][option] = None

        allowed_option = section in _ALLOWED_SCHEMA_OPTIONS and option in _ALLOWED_SCHEMA_OPTIONS[section]

        if allowed_option:
            if isinstance(value, dict):
                raise TypeError("Option values cannot be dictionaries")
            elif isinstance(value, (list, tuple, set)):
                value = ','.join(value)

            self._configuration[section][option] = _copy(value)
        else:
            raise MultipleInvalid("Section:{} and Option:{} not valid on Config Schema".format(section, option))
Example #13
    def __init__(self,
                 name=None,
                 drive_uid=None,
                 creds=None,
                 aclrules=None,
                 cheque=None,
                 max_size=None,
                 autocreate=True):
        """Construct a handle to the drive that the passed user
           calls 'name' on the passed storage service. If
           'autocreate' is True and the user is logged in then
           this will automatically create the drive if
           it doesn't exist already
        """
        self._metadata = None
        self._creds = None

        if creds is not None:
            from Acquire.Client import StorageCreds as _StorageCreds
            if not isinstance(creds, _StorageCreds):
                raise TypeError("creds must be type StorageCreds")

            drive = _get_drive(creds=creds,
                               name=name,
                               drive_uid=drive_uid,
                               aclrules=aclrules,
                               autocreate=autocreate)

            from copy import copy as _copy
            self.__dict__ = _copy(drive.__dict__)
Example #14
 def metadata(self):
     """Return the metadata about this drive"""
     if self.is_null():
         return None
     else:
         from copy import copy as _copy
         return _copy(self._metadata)
Example #15
 def _clear_dirty(self):
     '''
     Clear the dirty flag after the value has been persisted
     '''
     # Make a new copy of the persisted value
     self._original_value = _copy(self._variables)
     self._dirty = False
Example #16
def _generate_mapjob_tasks(task_prototype, param_set):
    """
    Creates len(param_set) tasks, each of which is a clone of
    task_prototype with a different element of param_set as its parameters.
    The output of each task will be written to a temporary location.

    Returns a list of tasks. Also returns
    the mapping of param values to output location.
    """
    param_set = _copy(param_set)
    param_set.reverse()  # Preserve order; we'll be 'popping' in reverse order.

    tasks = []

    num_in_cur_step = 0
    tasks.append([])
    while param_set:

        cur_params = param_set.pop()
        cur_name = "-".join([task_prototype.name, str(0), str(num_in_cur_step)])
        cur_task = task_prototype.clone(cur_name)
        cur_task.set_inputs(cur_params)
        tasks[-1].append(cur_task)
        num_in_cur_step += 1

    return tasks
Example #17
 def insert(self, first_arg, second_arg=None):
     if second_arg is None:
         if isinstance(first_arg, abc.Iterable):
             values = list(first_arg)
             if values:
                 self._tokenizer.reset()
             for value in values:
                 self._tree.insert(value, None)
         else:
             node, inserted = self._tree.insert(first_arg, None)
             if inserted:
                 self._tokenizer.reset()
             return (set.iterator(node, self._tree,
                                  self._tokenizer.create_weak()),
                     inserted)
     elif isinstance(second_arg, LegacyInputIterator):
         if not isinstance(first_arg, type(second_arg)):
             raise TypeError('Both ends of the insertion range '
                             'should have same type, but found: '
                             '{first_type}, {last_type}.'
                             .format(first_type=type(first_arg),
                                     last_type=type(second_arg)))
         first = _copy(first_arg)
         values = []
         while first != second_arg:
             values.append(first.inc().value)
         if values:
             self._tokenizer.reset()
         for value in values:
             self._tree.insert(value, None)
     else:
         self._tokenizer.reset()
         self._tree.insert(second_arg, None)
     return None
Example #18
 def _shallowcopy_attributes(self, dst: "BaseModel") -> None:
     for attr in ["_gene", "_lineage", "_is_bulk"]:
         setattr(dst, attr, _copy(getattr(self, attr)))
     # user is not exposed to this
     dst._obs_names = self._obs_names
     # always deepcopy the model (we're manipulating it in multiple threads/processed)
     dst._model = deepcopy(self.model)
Example #19
def _printLen(x):
    if isinstance(x, _list_iter_type):
        x = list(_copy(x))
    if _isNdArray(x):
        print(x.shape)
    else:
        print(len(x))
Example #20
 def add_tracker(self, name):
     if self.i != 0:  # trackers can only be added at the first step
         return
     if name not in self.data:
         self.data[name] = _numpy.empty((self.time.shape[0],), "d")
         self.tracking += [name]
         self._update = _copy(self.tracking)
Example #21
    def copy(self, recurse=True, _par=None):
        """
        Return a shallow copy of self. If recurse is False, do not copy children,
        parents, or any attribute that is a Node.

        Args:
            recurse (bool): Whether or not to copy children as well as self.

        Returns:
            Node: A copy of self.

        """
        for n in self.iternodes():
            n.cached = False
        newnode = Node()
        for attr, value in list(self.__dict__.items()):
            if (attr not in ("children", "parent")
                    and not isinstance(value, Node)):
                setattr(newnode, attr, _copy(value))
        if recurse:
            newnode.children = [
                child.copy(True, _par=newnode) for child in self.children
            ]
            if _par:
                newnode.parent = _par
        for n in self.iternodes():
            n.cached = True
        newnode.set_iternode_cache()
        return newnode
Example #22
 def copy(self, **kwargs) -> IoTyping:
     x = _copy(self)
     for k, v in kwargs.items():
         if not hasattr(x, k):
             raise AttributeError(f"No attribute {k}")
         setattr(x, k, v)
     return x
Example #23
def _find_path(maps, path, current_state, start_state):
    paths = []
    # current_state may not be in maps if it only has outgoing
    # transitions, i.e. an initial state you cannot return to.
    if current_state not in maps:
        return

    for new_transition, from_states in maps[current_state]:
        next_path = _copy(path)
        if new_transition in path:
            # Don't go in a circle
            continue

        next_path.insert(0, new_transition)
        if start_state in from_states:
            paths.append(next_path)
            continue

        for state in from_states:
            recursive_paths = _find_path(
                maps,
                next_path,
                state,
                start_state,
            )
            if recursive_paths:
                paths.append(recursive_paths)

    return len(paths) and min(paths, key=len) or None
Example #24
    def find_path(maps, path, current_state, start_state):
        paths = []
        # current_state may not be in maps if it only has outgoing
        # transitions, i.e. an initial state you cannot return to.
        if current_state not in maps:
            return
        for new_transition, from_states in maps[current_state]:
            next_path = _copy(path)
            if new_transition in path:
                # Don't go in a circle
                continue

            next_path.insert(0, new_transition)
            if start_state in from_states:
                paths.append(next_path)
                continue

            for state in from_states:
                recursive_paths = find_path(
                    maps,
                    next_path,
                    state,
                    start_state,
                )
                if recursive_paths:
                    paths.append(recursive_paths)

        return len(paths) and min(paths, key=len) or None
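Examples #23 and #24 copy path on every branch so that each outgoing transition extends its own list without disturbing sibling branches. A toy sketch of that pattern, assuming _copy is copy.copy:

from copy import copy as _copy

def explore(path, branches):
    # Each branch gets its own shallow copy; mutating one list does not affect the others.
    results = []
    for b in branches:
        next_path = _copy(path)
        next_path.insert(0, b)
        results.append(next_path)
    return results

print(explore(['goal'], ['a', 'b']))  # [['a', 'goal'], ['b', 'goal']]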
Example #25
File: tree.py Project: rhr/ivy
    def copy(self, recurse=False):
        """
        Return a copy of the node, but not copies of children, parent,
        or any attribute that is a Node.

        If `recurse` is True, recursively copy child nodes.

        Args:
            recurse (bool): Whether or not to copy children as well as self.

        Returns:
            Node: A copy of self.

        TODO: test this function.

        RR: This function runs rather slowly -CZ
        """
        newnode = Node()
        for attr, value in self.__dict__.items():
            if (attr not in ("children", "parent") and
                not isinstance(value, Node)):
                setattr(newnode, attr, _copy(value))
            if recurse:
                newnode.children = [
                    child.copy(True) for child in self.children
                    ]
        return newnode
Example #26
    def __init__(self,
                 service=None,
                 service_url=None,
                 service_uid=None,
                 service_type=None):
        """Construct the service that is accessed at the remote
           URL 'service_url'. This will fetch and return the
           details of the remote service. This wrapper is a
           chameleon class, and will transform into the
           class type of the fetched service, e.g.

            service = Acquire.Client.Service("https://identity_service_url")
            service.__class__ == Acquire.Identity.IdentityService
        """
        if service is not None:
            from Acquire.Service import Service as _Service
            service = _Service.resolve(service, fetch=True)["service"]
        else:
            try:
                from Acquire.Client import Wallet as _Wallet
                service = _Wallet().get_service(service_url=service_url,
                                                service_uid=service_uid,
                                                service_type=service_type)

            except Exception as e:
                self._failed = True
                raise e

        from copy import copy as _copy
        self.__dict__ = _copy(service.__dict__)
        self.__class__ = service.__class__
Example #27
    def addWatchPoint(cls, f, arg):
        """ watch the name of point in nearest namespace,
for example: if namespace A, B both have var a, the watch pointer may attach to A.a or B.a"""
        try:
            _value = eval(arg, f.f_locals, f.f_globals)
            astT = _ast.parse(arg)
            _id = cls._getId(astT)
            sys.stdout.write("%s %s" % (repr(_value), repr(_id)))
            if _id == 'error':
                raise NoThisVarError

            while f:
                if f.f_locals.has_key(_id) or f.f_globals.has_key(_id):
                    wd = f.f_locals if f.f_locals.has_key(_id) else f.f_globals
                    cls._watchPList.append(
                        [_copy(wd), _id, _value, arg,
                         id(f)])
                    sys.stdout.write("Succ to Add Watch Point")
                    sys.stdout.write('\n')
                    wd = None
                    return
                f = f.f_back

                raise NotFoundError
        except Exception as e:
            sys.stdout.write("Failed to Add Watch Point for %s" % repr(e))
            sys.stdout.write('\n')
Example #28
 def insert(self,
            position: const_iterator,
            second_arg,
            third_arg=None) -> None:
     index = position._index
     if third_arg is None:
         if isinstance(second_arg, abc.Iterable):
             values = list(second_arg)
             if values:
                 self._tokenizer.reset()
             self._values[index:index] = second_arg
         else:
             self._tokenizer.reset()
             self._values.insert(index, second_arg)
     elif isinstance(second_arg, int):
         if second_arg < 0:
             raise ValueError(
                 '`count` should be positive, but found {}.'.format(
                     second_arg))
         if second_arg:
             self._tokenizer.reset()
         self._values[index:index] = repeat(third_arg, second_arg)
     else:
         first = _copy(second_arg)
         values = []
         while first != third_arg:
             values.append(first.inc().value)
         if values:
             self._tokenizer.reset()
         self._values[index:index] = values
Example #29
def TEflagedges(data,altpaths,edges_orig,sgncmb_enum,**kwargs):
    data=_copy(data)
    N=size(altpaths,1)
    flag_list=m_array([cell2mat(altpaths[:,1-1:3+1-1]),2*ones(N,1)]).reshape(1,-1)
    for i in arange(1,N+1).reshape(1,-1).flat:
        new_edge_ind=edges_orig[altpaths[i-1,1-1]-1,:]
        flag_list[i-1,1-1]=find(sgncmb_enum[:,1-1]==new_edge_ind[int(1-1)]&sgncmb_enum[:,2-1]==new_edge_ind[int(2-1)])
        no_triangles=0
        for j in arange(1,length(altpaths[i-1,5-1][1-1])+1).reshape(1,-1).flat:
            if (length(altpaths[i-1,5-1][1-1][j-1])==3):
                no_triangles=no_triangles+1
                new_edge=m_array([altpaths[i-1,5-1][1-1][j-1][int(altpaths[i-1,5-1][1-1][j-1].shape[0]-1-1)],altpaths[i-1,5-1][1-1][j-1][int(altpaths[i-1,5-1][1-1][j-1].shape[0]-1)]]).reshape(1,-1)
                new_edge_ind=edges_orig[edges_orig[:,1-1]==new_edge[int(1-1)]&edges_orig[:,2-1]==new_edge[int(2-1)],:]
                new_edge_ind=find(sgncmb_enum[:,1-1]==new_edge_ind[int(1-1)]&sgncmb_enum[:,2-1]==new_edge_ind[int(2-1)])
                flag_list=cat(1,flag_list,m_array([new_edge_ind,new_edge,4]).reshape(1,-1))
                flag_list[i-1,4-1]=3
    disp(m_array([num2str(no_triangles),' triangle(s) were found by TEflagedges.']).reshape(1,-1))
    duplicates=m_array()
    for i in arange(1,size(flag_list,1)+1).reshape(1,-1).flat:
        ind=flag_list[i-1,1-1]==flag_list[i+1-1:flag_list.shape[0]+1-1,1-1]
        if (sum(ind)>0):
            ind=find(ind)+i
            duplicates=m_array([[duplicates],[ind]]).reshape(1,-1)
    flag_list[duplicates-1,:]=m_array()
    data.n_spuriousedges=size(flag_list,1)
    for i in arange(1,size(flag_list,1)+1).reshape(1,-1).flat:
        if isnan(data.TEpermvalues[flag_list[i-1,1-1]-1,4-1]):
            warning('This edge has already been flagged!')
        ind=flag_list[i-1,1-1]
        data.TEpermvalues[ind-1,:]=m_array([1,0,0,NaN(),flag_list[i-1,4-1],0]).reshape(1,-1)
    return data
Example #30
    def addWatchPoint(cls, f, arg):
        """ watch the name of point in nearest namespace,
for example: if namespace A, B both have var a, the watch pointer may attach to A.a or B.a"""
        try:
            _value = eval(arg, f.f_locals, f.f_globals)
            astT = _ast.parse(arg)
            _id = cls._getId(astT)
            sys.stdout.write("%s %s" % (repr(_value), repr(_id)))
            if _id == 'error':
                raise NoThisVarError
              
            while f:
                if f.f_locals.has_key(_id) or f.f_globals.has_key(_id):
                    wd = f.f_locals if f.f_locals.has_key(_id) else f.f_globals
                    cls._watchPList.append([_copy(wd) , _id, _value, arg, id(f)])
                    sys.stdout.write("Succ to Add Watch Point")
                    sys.stdout.write('\n')
                    wd = None
                    return 
                f = f.f_back
                      
                raise NotFoundError            
        except Exception as e:
            sys.stdout.write("Failed to Add Watch Point for %s"
                             % repr(e))
            sys.stdout.write('\n')
Example #31
    def __setup(self):
        """Construct the series of shell commands, i.e., fill in
           self.__commands"""

        # Create a copy of the toolchain so that it can be modified
        # without impacting the original.
        toolchain = _copy(self.__toolchain)

        # Need to tell it where to find HDF5
        if not toolchain.CPPFLAGS:
            toolchain.CPPFLAGS = '-I{}/include'.format(self.__hdf5_dir)
        if not toolchain.LDFLAGS:
            toolchain.LDFLAGS = '-L{}/lib'.format(self.__hdf5_dir)

        # Version 4.7.0 changed the package name
        if LooseVersion(self.__version) >= LooseVersion('4.7.0'):
            pkgname = 'netcdf-c'
        else:
            pkgname = 'netcdf'
        tarball = '{0}-{1}.tar.gz'.format(pkgname, self.__version)
        url = '{0}/{1}'.format(self.__baseurl, tarball)

        # Download source from web
        self.__commands.append(self.download_step(url=url,
                                                  directory=self.__wd))
        self.__commands.append(
            self.untar_step(tarball=posixpath.join(self.__wd, tarball),
                            directory=self.__wd))

        self.__commands.append(
            self.configure_step(directory=posixpath.join(
                self.__wd, '{0}-{1}'.format(pkgname, self.__version)),
                                toolchain=toolchain))

        self.__commands.append(self.build_step())

        # Check the build
        if self.__check:
            self.__commands.append(self.check_step())

        self.__commands.append(self.install_step())

        # Set library path
        libpath = posixpath.join(self.prefix, 'lib')
        if self.ldconfig:
            self.__commands.append(self.ldcache_step(directory=libpath))
        else:
            self.environment_variables[
                'LD_LIBRARY_PATH'] = '{}:$LD_LIBRARY_PATH'.format(libpath)

        self.__commands.append(
            self.cleanup_step(items=[
                posixpath.join(self.__wd, tarball),
                posixpath.join(self.__wd, '{0}-{1}'.format(
                    pkgname, self.__version))
            ]))

        # Set the environment
        self.environment_variables['PATH'] = '{}:$PATH'.format(
            posixpath.join(self.prefix, 'bin'))
Example #32
 def _xkwds(kwds, **dflts):  # PYCHOK expected
     '''(INTERNAL) Override C{dflts} with specified C{kwds}.
     '''
     d = dflts
     if kwds:
         d = _copy(d)
         d.update(kwds)
     return d
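A usage sketch for _xkwds, assuming _copy is copy.copy: the defaults are copied before being updated, so the caller's dflts mapping is never mutated.

from copy import copy as _copy

def _xkwds(kwds, **dflts):
    '''Override dflts with the specified kwds.'''
    d = dflts
    if kwds:
        d = _copy(d)
        d.update(kwds)
    return d

opts = _xkwds({'units': 'm'}, units='km', precision=3)
print(opts)  # {'units': 'm', 'precision': 3}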
Example #33
def row2feature(row, id_field, geometry_field):

    feature = {'type': 'Feature', 'properties': _copy(row)}
    geometry = feature['properties'].pop(geometry_field)
    feature['geometry'] = _loadshape(_unhexlify(geometry))
    feature['id'] = feature['properties'].pop(id_field)

    return feature
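A toy run of row2feature (see also Examples #35 and #45) with hypothetical stand-ins for the geometry helpers; the real _unhexlify/_loadshape presumably decode hex-encoded geometry, here they are stubbed so only the dict plumbing is exercised:

from copy import copy as _copy

_unhexlify = bytes.fromhex  # stand-in for the project's helper

def _loadshape(raw):
    return {'type': 'Point', 'raw': raw}  # stand-in for the project's helper

def row2feature(row, id_field, geometry_field):
    feature = {'type': 'Feature', 'properties': _copy(row)}
    geometry = feature['properties'].pop(geometry_field)
    feature['geometry'] = _loadshape(_unhexlify(geometry))
    feature['id'] = feature['properties'].pop(id_field)
    return feature

row = {'gid': 7, 'name': 'spot', 'geom': '0101'}
print(row2feature(row, 'gid', 'geom'))
# {'type': 'Feature', 'properties': {'name': 'spot'},
#  'geometry': {'type': 'Point', 'raw': b'\x01\x01'}, 'id': 7}

Because properties holds a copy of row, the two pops do not modify the caller's row dict.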
Example #34
def _xkwds(kwds, **dflts):
    '''(INTERNAL) Override C{dflts} with C{kwds}.
    '''
    d = dflts
    if kwds:
        d = _copy(d)
        d.update(kwds)
    return d
Example #35
def row2feature(row, id_field, geometry_field):

    feature = {'type': 'Feature', 'properties': _copy(row)}
    geometry = feature['properties'].pop(geometry_field)
    feature['geometry'] = _loadshape(_unhexlify(geometry))
    feature['id'] = feature['properties'].pop(id_field)
    
    return feature
Example #36
    def _save_state_to_s3(self):
        # Dump immutable state data to a config
        state = _ConfigParser(allow_no_value=True)
        state.optionxform = str
        state.add_section(PredictiveService._SERVICE_INFO_SECTION_NAME)
        state.set(PredictiveService._SERVICE_INFO_SECTION_NAME, 'Name', self.name)
        state.set(PredictiveService._SERVICE_INFO_SECTION_NAME, 'Description', self._description)
        state.set(PredictiveService._SERVICE_INFO_SECTION_NAME, 'API Key', self._api_key)

        # Save environment, if we have one
        if self._environment:
            state.add_section(PredictiveService._ENVIRONMENT_SECTION_NAME)
            for (key, value) in self._environment._get_state().iteritems():
                state.set(PredictiveService._ENVIRONMENT_SECTION_NAME, key, value)

        # Save deployment version data to config
        state.add_section(PredictiveService._DEPLOYMENT_SECTION_NAME)
        current_predictive_objects = _copy(self._all_predictive_objects)
        for (model_name, info) in current_predictive_objects.iteritems():
            state.set(PredictiveService._DEPLOYMENT_SECTION_NAME, model_name, info['version'])

        state.add_section(PredictiveService._PREDICTIVE_OBJECT_DOCSTRING)
        for (model_name, info) in current_predictive_objects.iteritems():
            state.set(PredictiveService._PREDICTIVE_OBJECT_DOCSTRING, model_name, info['docstring'].encode('string_escape'))

        if self._has_state_changed_on_s3():
            raise IOError("Can not save changes. The Predictive Service has changed on S3. Please "
                          "reload from S3.")

        # Save any new predictive objects to S3.
        for predictive_object_name in self._local_changes:
            (predictive_object, po_info) = self._local_changes[predictive_object_name]
            if predictive_object:         # if this is not a model deletion:
                save_path = self._get_predictive_object_save_path(predictive_object_name, po_info['version'])
                dependency_path = self._get_dependency_save_path(predictive_object_name, po_info['version'])
                predictive_object.save(save_path, dependency_path, self.aws_credentials)

        # Update the revision number after we have successfully written all predictive objects
        self._revision_number += 1
        state.add_section(self._META_SECTION_NAME)
        state.set(self._META_SECTION_NAME, 'Revision Number', self._revision_number)
        state.set(self._META_SECTION_NAME, 'Schema Version', self._schema_version)

        # Write state file to S3
        with _NamedTemporaryFile() as temp_file:
            state.write(temp_file)
            temp_file.flush()
            conn = _connect_s3(**self.aws_credentials)
            bucket = conn.get_bucket(self._s3_bucket_name, validate=False)
            key = _s3_key(bucket)
            key.key = self._s3_state_key
            key.set_contents_from_filename(temp_file.name)
            temp_file.close()  # deletes temp file

        # Update our state
        self._local_changes = {}
        self._predictive_objects = dict(zip(current_predictive_objects.keys(),
            [{'version':info['version'], 'docstring': info['docstring']} for info in current_predictive_objects.values()]))
Example #37
 def copy(self):
     '''
     make a copy of this image
     '''
     from copy import copy as _copy
     data = self.data.copy()
     copied = EMImage(data, mmap=self._mmap, mode=self._mmap_mode)
     copied.headers = _copy(self.headers)
     return copied
Example #38
 def copy(self, deep=True):  # pylint:disable=overridden-final-method
     """Perform a pandas deep copy of the ELPDData plus a copy of the stored data."""
     copied_obj = pd.Series.copy(self)
     for key in copied_obj.keys():
         if deep:
             copied_obj[key] = _deepcopy(copied_obj[key])
         else:
             copied_obj[key] = _copy(copied_obj[key])
     return ELPDData(copied_obj)
Example #39
 def __mul__(self, other):  # => a*b
     '''
     '''
     if type(other) == int:  # a * n = [a, a,..., a]
         res = []
         for i in range(other):
             res.append(_deepcopy(self))
         return res
     a = _copy(self._coefficients)
     b = _copy(other._coefficients)
     res = self.__multiply__(a, b)
     self._debug_stream("c = a * b = %s * %s = %s"
                        % (self.__interpretToStr__(a),
                           self.__interpretToStr__(b),
                           self.__interpretToStr__(res)))
     p = BinaryExtensionModuloConstructor(res)
     p.xors = self.xors
     return p
Example #40
 def column(self, idx):
     """
     range with given col of current range
     :param idx: indexing is 1-based, negative indices start from last col
     :return: new range object
     """
     coords = _a2cr(self.address)
     if len(coords) == 2:
         return _copy(self)
     else:
         newcoords = _copy(coords)
         if idx < 0:
             newcoords[0] = newcoords[2] + idx + 1
         else:
             newcoords[0] += idx - 1
         newcoords[2] = newcoords[0]
         newaddr = _cr2a(*newcoords)
         return Rng(address=newaddr, sheet=self.sheet)
Example #41
    def __pow__(self, degree):

        new_unit = _copy(self)

        new_unit.value **= degree
        new_unit.units = dict([(k, v * degree)
                               for k, v in new_unit.units.items()])

        return new_unit
Example #42
 def copy(self, deep=True):
     """Perform a pandas deep copy of the ELPDData plus a copy of the stored data."""
     copied_obj = pd.Series.copy(self)
     for key in copied_obj.keys():
         if deep:
             copied_obj[key] = _deepcopy(copied_obj[key])
         else:
             copied_obj[key] = _copy(copied_obj[key])
     return ELPDData(copied_obj)
Example #43
    def _start_commander_host(env_name, config, s3_folder_path, num_hosts, additional_packages,
                              idle_shutdown_timeout):
        @_file_util.retry(tries=240, delay=2, retry_exception=_requests.exceptions.ConnectionError)
        def _wait_for_host_to_start_up():
            response = _requests.get("http://%s:9004/ping" % commander.public_dns_name)
            if not response:
                raise RuntimeError()

        credentials = config.get_credentials()

        # Set user data for cluster controller
        num_children = num_hosts - 1
        user_data = {
            'auth_token': '', 'AWS_ACCESS_KEY_ID': credentials['aws_access_key_id'],
            'AWS_SECRET_ACCESS_KEY': credentials['aws_secret_access_key'], 'daemon': True,
            'is_cluster_controller': True, 'num_children_host': num_children,
            's3_folder_path': s3_folder_path, 'additional_packages': additional_packages,
            'idle_shutdown_timeout': idle_shutdown_timeout
            }

        # Propagating debug environment variables to user data
        if('GRAPHLAB_TEST_AMI_ID' in _os.environ and 'GRAPHLAB_TEST_ENGINE_URL' in _os.environ
           and 'GRAPHLAB_TEST_OS_URL' in _os.environ and 'GRAPHLAB_TEST_HASH_KEY' in _os.environ):
            user_data['GRAPHLAB_TEST_AMI_ID'] = _os.environ['GRAPHLAB_TEST_AMI_ID']
            user_data['GRAPHLAB_TEST_ENGINE_URL'] = _os.environ['GRAPHLAB_TEST_ENGINE_URL']
            user_data['GRAPHLAB_TEST_OS_URL'] = _os.environ['GRAPHLAB_TEST_OS_URL']
            user_data['GRAPHLAB_TEST_HASH_KEY'] = _os.environ['GRAPHLAB_TEST_HASH_KEY']
        if('GRAPHLAB_TEST_EC2_KEYPAIR' in _os.environ):
            user_data['GRAPHLAB_TEST_EC2_KEYPAIR'] = _os.environ['GRAPHLAB_TEST_EC2_KEYPAIR']

        # Launch the cluster controller
        tags = _copy(config.tags)
        tags['Name'] = env_name
        commander, security_group, subnet_id = _ec2_factory(config.instance_type, region = config.region,
                                 CIDR_rule = config.cidr_ip,
                                 security_group_name = config.security_group,
                                 tags = tags, user_data = user_data,
                                 credentials = credentials,
                                 product_type = _ProductType.DatoDistributed)

        # Log message explaining the additional hosts will not be launched by us.
        if num_children == 1:
            __LOGGER__.info("One additional host will be launched by %s" % commander.instance_id)
        elif num_children > 1:
            __LOGGER__.info("%d additional hosts will be launched by %s"
                            % (num_children, commander.instance_id))

        # Wait for cluster_controller_daemon
        __LOGGER__.info("Waiting for %s to start up." % commander.instance_id)
        try:
            _wait_for_host_to_start_up()
        except:
            raise RuntimeError('Unable to start host(s). Please terminate '
                               'manually from the AWS console.')

        return commander.public_dns_name
Example #44
def _xcopy(inst, deep=False):
    '''(INTERNAL) Copy an object, shallow or deep.

       @arg inst: The object to copy (any C{type}).
       @kwarg deep: If C{True} make a deep, otherwise
                    a shallow copy (C{bool}).

       @return: The copy of B{C{inst}}.
    '''
    return _deepcopy(inst) if deep else _copy(inst)
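A short demonstration of the shallow/deep distinction _xcopy switches on, assuming _copy and _deepcopy are copy.copy and copy.deepcopy:

from copy import copy as _copy, deepcopy as _deepcopy

def _xcopy(inst, deep=False):
    '''Copy an object, shallow or deep.'''
    return _deepcopy(inst) if deep else _copy(inst)

original = {'points': [1, 2]}
shallow = _xcopy(original)
deep = _xcopy(original, deep=True)
original['points'].append(3)
print(shallow['points'])  # [1, 2, 3] -- still shares the nested list
print(deep['points'])     # [1, 2]    -- fully independent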
Example #45
def row2feature(row, id_field, geometry_field):
    """ Convert a database row dict to a feature dict.
    """
    feature = {'type': 'Feature', 'properties': _copy(row)}

    geometry = feature['properties'].pop(geometry_field)
    feature['geometry'] = _loadshape(_unhexlify(geometry))
    feature['id'] = feature['properties'].pop(id_field)
    
    return feature
Example #46
    def get_models(self):
        '''
        Return a list of models that are served by this policy

        Returns
        --------
        models : list[str]
            A list of models that are served by this policy
        '''
        return _copy(self._models)
Example #47
    def __init__(self, other=None):
        if isinstance(other, _Service):
            self.__dict__ = _copy(other.__dict__)

            if not self.is_accounting_service():
                raise AccountingServiceError(
                    "Cannot construct an AccountingService from "
                    "a service which is not an accounting service!")
        else:
            _Service.__init__(self)
Example #48
 def _solve(solver, x0, disp=False, callback=None):
     from copy import deepcopy as _copy
     from mystic.tools import isNull
     if x0 is not None:
         solver.SetInitialPoints(x0)
         if solver._useStrictRange: #XXX: always, settable, or sync'd ?
             solver.SetStrictRanges(solver._strictMin,solver._strictMax)
     solver.Solve(cost,ExtraArgs=ExtraArgs,disp=disp,callback=callback)
     sm = solver._stepmon
     em = solver._evalmon
     if isNull(sm): sm = ([],[],[],[])
     else:
         sm = _copy(sm)
         sm = (sm._x,sm._y,sm._id,sm._info)
     if isNull(em): em = ([],[],[],[])
     else:
         em = _copy(em)
         em = (em._x,em._y,em._id,em._info)
     return solver, sm, em
Example #49
def semilogypn(x,y,specpos,specneg,varargin,**kwargs):
    error(nargchk(1,inf(),nargin()))
    if nargin()<2:
        y=x
        x=arange(1,length(y)+1).reshape(1,-1)
    if nargin()<3:
        specpos='b-'
        specneg='b--'
    else:
        if nargin()<4:
            if findstr(specpos,'--'):
                specneg=strrep(specpos,'--','-')
            else:
                specneg=specpos
                specneg=strrep(specneg,'-.','')
                specneg=strrep(specneg,':','')
                specneg=strrep(specneg,'-','')
                specneg=m_array([specneg,'--']).reshape(1,-1)
    indpos=find(y>0)
    indneg=find(y<0)
    ypos=_copy(y)
    ypos[int(indneg-1)]=0
    yneg=_copy(y)
    yneg[int(indpos-1)]=0
    if isempty(indneg) and  not isempty(indpos):
        hp=semilogy(x,ypos,specpos,varargin[:])
        hn=m_array()
    else:
        if  not isempty(indneg) and isempty(indpos):
            hp=m_array()
            hn=semilogy(x,-yneg,specneg,varargin[:])
        else:
            statehold=ishold()
            hp=semilogy(x,ypos,specpos,varargin[:])
            hold('on')
            hn=semilogy(x,-yneg,specneg,varargin[:])
            if statehold==false():
                hold('off')
    if  not nargout():
        clear('hp','hn')
    return hp,hn
Example #50
 def _preprocess(self, data):
     """
     Internal function to perform fit_transform() on all but last step.
     """
     transformed_data = _copy(data)
     for name, step in self._transformers[:-1]:
         transformed_data = step.fit_transform(transformed_data)
         if type(transformed_data) != _gl.SFrame:
             raise RuntimeError("The transform function in step '%s' did not"
                 " return an SFrame (got %s instead)." % (name,
                                         type(transformed_data).__name__))
     return transformed_data
Example #51
    def variables(self):
        """Getter for variables.

        The specified variables, including the newly computed variables,
        the between-subject variables, and the added variables.

        """

        variables = _copy(self._variables)
        variables.extend(self._subject_variables)
        variables.extend(self._added_variables)
        return variables
Example #52
def array(element, count):
    """Creates an array.

    The array is initialized with the value of ``element`` repeated ``count``
    times. Elements can be read and written using the regular Python index
    syntax.

    For static compilation, ``count`` must be a fixed integer.

    Arrays of arrays are supported.

    """
    return [_copy(element) for i in range(count)]
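Example #52 copies element for every slot because, with mutable elements, [element] * count would alias one object count times. A minimal comparison, assuming _copy is copy.copy:

from copy import copy as _copy

def array(element, count):
    return [_copy(element) for i in range(count)]

aliased = [[0, 0]] * 3     # three references to the same inner list
copied = array([0, 0], 3)  # three independent inner lists
aliased[0][0] = 9
copied[0][0] = 9
print(aliased)  # [[9, 0], [9, 0], [9, 0]]
print(copied)   # [[9, 0], [0, 0], [0, 0]]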
Example #53
    def _solve(self, id=None, disp=None):
        from copy import deepcopy as _copy
        solver = _copy(self.solver) #FIXME: python2.6
        # configure solver
        solver.id = id
        model = solver._cost[1] #FIXME: HACK b/c Solve(model) is required
        # solve
        disp = self.disp if disp is None else disp
#       import time
#       start = time.time()
        solver.Solve(model, disp=disp)
#       print "TOOK: %s" % (time.time() - start)
        return solver
Example #54
 def _advance(self):
     if self.traited:
         for key in self.tracking:
             self.data[key][self.i] = getattr(self.source, key)
     else:
         if self.i:
             for key in self._update:
                 self.data[key][self.i] = self.data[key][self.i - 1]
         self._update = _copy(self.tracking)
     if self.i + 1 == self.time.shape[0]:
         self.finish()
     else:
         self.i += 1
         self.t = self.time[self.i]
Example #55
def TEdfs_rec(source,target,adjacency_list,visited,bool,**kwargs):
    visited=_copy(visited)
    if source==target:
        bool=1
        return bool
    else:
        for i in arange(1,size(adjacency_list[source-1],2)+1).reshape(1,-1).flat:
            if visited[int(adjacency_list[source-1][1-1,i-1]-1)]==0:
                new_source=adjacency_list[source-1][1-1,i-1]
                visited[int(new_source-1)]=1
                bool=TEdfs_rec(new_source,target,adjacency_list,visited,bool)
            if logical(bool):
                return bool
    return bool
Example #56
 def local_optimize(solver, x0, rank=None, disp=False, callback=None):
     from copy import deepcopy as _copy
     from mystic.tools import isNull
     solver.id = rank
     solver.SetInitialPoints(x0)
     if solver._useStrictRange: #XXX: always, settable, or sync'd ?
         solver.SetStrictRanges(min=solver._strictMin, \
                                max=solver._strictMax) # or lower,upper ?
     solver.Solve(cost, disp=disp, callback=callback)
     sm = solver._stepmon
     em = solver._evalmon
     if isNull(sm): sm = ([],[],[],[])
     else: sm = (_copy(sm._x),_copy(sm._y),_copy(sm._id),_copy(sm._info))
     if isNull(em): em = ([],[],[],[])
     else: em = (_copy(em._x),_copy(em._y),_copy(em._id),_copy(em._info))
     return solver, sm, em
Example #57
    def from_parameterset(cls, params, copy=False):
        """
        Instantiates a CharmmParameterSet from another ParameterSet (or
        subclass). The main thing this feature is responsible for is converting
        lower-case atom type names into all upper-case and decorating the name
        to ensure each atom type name is unique.

        Parameters
        ----------
        params : :class:`parmed.parameters.ParameterSet`
            ParameterSet containing the list of parameters to be converted to a
            CHARMM-compatible set
        copy : bool, optional
            If True, the returned parameter set is a deep copy of ``params``. If
            False, the returned parameter set is a shallow copy. Default False.

        Returns
        -------
        new_params : OpenMMParameterSet
            OpenMMParameterSet with the same parameters as that defined in the
            input parameter set
        """
        new_params = cls()
        if copy:
            # Make a copy so we don't modify the original
            params = _copy(params)
        new_params.atom_types = new_params.atom_types_str = params.atom_types
        new_params.atom_types_int = params.atom_types_int
        new_params.atom_types_tuple = params.atom_types_tuple
        new_params.bond_types = params.bond_types
        new_params.angle_types = params.angle_types
        new_params.urey_bradley_types = params.urey_bradley_types
        new_params.dihedral_types = params.dihedral_types
        new_params.improper_types = params.improper_types
        new_params.improper_periodic_types = params.improper_periodic_types
        new_params.rb_torsion_types = params.rb_torsion_types
        new_params.cmap_types = params.cmap_types
        new_params.nbfix_types = params.nbfix_types
        new_params.pair_types = params.pair_types
        new_params.parametersets = params.parametersets
        new_params._combining_rule = params.combining_rule
        new_params.default_scee = params.default_scee
        new_params.default_scnb = params.default_scnb
        # add only ResidueTemplate instances (no ResidueTemplateContainers)
        for name, residue in iteritems(params.residues):
            if isinstance(residue, ResidueTemplate):
                new_params.residues[name] = residue

        return new_params
Example #58
    def __init_allSolvers(self):
        'populate NestedSolver state to allSolvers'
        # get the nested solver instance
        solver = self._AbstractEnsembleSolver__get_solver_instance()

        # configure inputs for each solver
        from copy import deepcopy as _copy
        at = self.id if self.id else 0  #XXX start at self.id?
        #at = max((getattr(i, 'id', self.id) or 0) for i in self._allSolvers)
        for i,op in enumerate(self._allSolvers):
            if op is None: #XXX: don't reset existing solvers?
                op = _copy(solver)
                op.id = i
                self._allSolvers[i] = op
        return self._allSolvers
Example #59
	def __call__(self, device):
		assert not hasattr(self, '_value')
		assert self.device_kind is None or device.kind in self.device_kind
		p = device.protocol
		if p == 1.0:
			# HID++ 1.0 devices do not support features
			assert self._rw.kind == RegisterRW.kind
		elif p >= 2.0:
			# HID++ 2.0 devices do not support registers
			assert self._rw.kind == FeatureRW.kind

		o = _copy(self)
		o._value = None
		o._device = device
		return o
Example #60
def setOpts(defaults,options,**kwargs):
    if nargin()==1 or isempty(options):
        user_fields=m_array()
    else:
        if isstruct(options):
            user_fields=fieldnames(options)
        else:
            user_fields=options[1-1:2:options.shape[0]+1-1]
            options=struct(options[:])
    if isstruct(defaults):
        params=_copy(defaults)
    else:
        params=struct(defaults[:])
    for k in arange(1,length(user_fields)+1).reshape(1,-1).flat:
        setfield(params,user_fields[k-1],getfield(options,user_fields[k-1]))
    return params