Example #1
def iou3d(corners_3d_b1, corners_3d_b2, vol):

    corners_3d_b1 = copy.copy(corners_3d_b1)
    corners_3d_b2 = copy.copy(corners_3d_b2)

    corners_3d_b1 = corners_3d_b1.T
    corners_3d_b2 = corners_3d_b2.T

    y_min_b1 = np.min(corners_3d_b1[:, 1])
    y_max_b1 = np.max(corners_3d_b1[:, 1])
    y_min_b2 = np.min(corners_3d_b2[:, 1])
    y_max_b2 = np.max(corners_3d_b2[:, 1])
    y_intersect = np.max(
        [0, np.min([y_max_b1, y_max_b2]) - np.max([y_min_b1, y_min_b2])])

    # set Z as Y
    corners_3d_b1[:, 1] = corners_3d_b1[:, 2]
    corners_3d_b2[:, 1] = corners_3d_b2[:, 2]

    polygon_order = [7, 2, 3, 6, 7]
    box_b1_bev = Polygon([list(corners_3d_b1[i][0:2]) for i in polygon_order])
    box_b2_bev = Polygon([list(corners_3d_b2[i][0:2]) for i in polygon_order])

    intersect_bev = box_b2_bev.intersection(box_b1_bev).area
    intersect_3d = y_intersect * intersect_bev

    iou_bev = intersect_bev / (box_b2_bev.area + box_b1_bev.area -
                               intersect_bev)
    iou_3d = intersect_3d / (vol - intersect_3d)

    return iou_bev, iou_3d
Example #2
    def _remove_node(self, removing_node):
        word = ''

        if removing_node in self.edges:
            for letter, nodes in self.edges[removing_node].items():
                if removing_node in nodes:
                    word = letter
                    break

        for node, edge in self.edges.items():
            for letter, node_sets in copy.copy(edge).items():
                if removing_node in node_sets and removing_node in self.edges:
                    for to_letter, to_edges in copy.copy(self.edges[removing_node]).items():
                        if to_edges == removing_node:
                            continue
                        for to_edge in to_edges:
                            self._add_throw_edge(node, letter, removing_node, word, to_edge, to_letter)

        for node, edge in self.edges.items():
            for letter, node_sets in edge.items():
                if removing_node in node_sets:
                    self._erase_edge(node, letter, {removing_node})

        if removing_node in self.edges:
            del self.edges[removing_node]

        self.nodes.remove(removing_node)
Example #3
def iou3d(corners_3d_b1, corners_3d_b2, vol=None):
    """
        Calculates the 3D IoU of two axis-aligned cuboids.
        The corners of each cuboid should be a numpy array of shape (3, 8).
        Axes convention

                Z
               /
              /
             /
            /______ X
            |
            |
            |
            V
            Y

             4 ___________________ 5
              /|                 /|
             / |              1 / |
          0 /__|_______________/  |
            |  |---------------|--|6
            |  /7              |  /
            | /                | /
           2|/_________________|/ 3

    """
    corners_3d_b1 = copy.copy(corners_3d_b1)
    corners_3d_b2 = copy.copy(corners_3d_b2)

    if vol is None:
        vol = get_volume(corners_3d_b1) + get_volume(corners_3d_b2)

    corners_3d_b1 = corners_3d_b1.T
    corners_3d_b2 = corners_3d_b2.T

    y_min_b1 = np.min(corners_3d_b1[:, 1])
    y_max_b1 = np.max(corners_3d_b1[:, 1])
    y_min_b2 = np.min(corners_3d_b2[:, 1])
    y_max_b2 = np.max(corners_3d_b2[:, 1])
    y_intersect = np.max([0, np.min([y_max_b1, y_max_b2]) - np.max([y_min_b1, y_min_b2])])

    # set Z as Y
    corners_3d_b1[:, 1] = corners_3d_b1[:, 2]
    corners_3d_b2[:, 1] = corners_3d_b2[:, 2]

    polygon_order = [7, 2, 3, 6, 7]
    box_b1_bev = Polygon([list(corners_3d_b1[i][0:2]) for i in polygon_order])
    box_b2_bev = Polygon([list(corners_3d_b2[i][0:2]) for i in polygon_order])

    intersect_bev = box_b2_bev.intersection(box_b1_bev).area
    intersect_3d = y_intersect * intersect_bev

    iou_bev = intersect_bev / (box_b2_bev.area + box_b1_bev.area - intersect_bev)
    iou_3d = intersect_3d / (vol - intersect_3d)

    return iou_bev, iou_3d
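A quick sanity check (a sketch; it assumes numpy plus the shapely Polygon and get_volume helpers this snippet relies on): two unit cubes overlapping by half a unit along X should give an IoU of 0.5 / 1.5 = 1/3, both in bird's-eye view and in 3D.

import numpy as np

# Corner columns follow the diagram above (X right, Y down, Z into the page).
cube = np.array([[0., 1., 0., 1., 0., 1., 1., 0.],   # X
                 [0., 0., 1., 1., 0., 0., 1., 1.],   # Y
                 [0., 0., 0., 0., 1., 1., 1., 1.]])  # Z
shifted = cube.copy()
shifted[0] += 0.5                       # slide half a unit along X
iou_bev, iou_3d = iou3d(cube, shifted)  # vol defaults to the two volumes summed
print(iou_bev, iou_3d)                  # both print ~0.3333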
Example #4
    def baxter_ik_move(self, limb, rpy_pose):
        quaternion_pose = self.list_to_pose_stamped(rpy_pose, "base")
        node = "ExternalTools/" + limb + "/PositionKinematicsNode/IKService"
        ik_service = rospy.ServiceProxy(node, SolvePositionIK)
        ik_request = SolvePositionIKRequest()
        hdr = Header(stamp=rospy.Time.now(), frame_id="base")

        iterate_IK = True
        n_iterations = 0
        ik_request.pose_stamp.append(quaternion_pose)
        while iterate_IK:
            try:
                rospy.wait_for_service(node, 5.0)
                ik_response = ik_service(ik_request)
            except (rospy.ServiceException, rospy.ROSException), error_message:
                rospy.logerr("Service request failed: %r" % (error_message, ))
                sys.exit("ERROR - baxter_ik_move - Failed to append pose")

            if ik_response.isValid[0]:
                print("PASS: Valid joint configuration found")
                # convert response to joint position control dictionary
                limb_joints = list(ik_response.joints[0].position)
                limb_joints1 = dict(
                    zip(ik_response.joints[0].name,
                        ik_response.joints[0].position))
                iterate_IK = False
            else:
                currentJointState = rospy.wait_for_message(
                    "/robot/joint_states", JointState)
                newJointState = copy.copy(currentJointState)

                seed_angle_val = JointState()
                seed_angle_val.header.stamp = rospy.Time.now()
                current_angle = []
                name_val = []
                for i in range(2, 9):
                    temp_name = copy.copy(currentJointState.name[i])
                    temp_angle = copy.copy(currentJointState.position[i])
                    temp_angle = temp_angle + (random.random() - 0.5) / 10
                    name_val.append(temp_name)
                    current_angle.append(temp_angle)

                seed_angle_val.name = name_val
                seed_angle_val.position = current_angle
                seed_angle_list = [seed_angle_val]
                ik_request.seed_angles = seed_angle_list
                n_iterations = n_iterations + 1
                print n_iterations

            if n_iterations > 50:
                # display invalid move message on head display
                self.splash_screen("Invalid IK", "Solution")

                print 'IK did not converge after %d iterations' % n_iterations
                return
Example #5
 def queens(self, row, board, ans):
     n = len(board)
     if row == n:
         for i in range(n):
             board[i] = "".join(board[i])
         ans.append(board)
     else:
         for col in range(n):
             if self.isvalid(board, row, col):
                 tmp = copy.copy(board)
                 tmp[row] = copy.copy(board[row])
                 tmp[row][col] = 'Q'
                 self.queens(row + 1, tmp, ans)
Example #6
def spin(currentList,timesSpin):
    """
    Spins through animals in the current list
    If the arrow lands on switch, switches animal wheel
    If the arrow lands on an animal, it increments that animal's timesLanded
    If any animal's timesLanded equals 3: it stops and declares that animal the winner
    :param currentList: List to spin through
    :param timesSpin: Number of animals it spins through before it stops
    :return: Strings (Print statements)
    """
    spinCounter = 1
    winner = False
    print('Playing the game...')
    while winner == False:
        print('Spin ' + str(spinCounter) + ': ')
        spinsLeft = timesSpin              # reset the countdown for every spin
        while spinsLeft > 1:
            print(currentList.cursor.data + ' -> ')
            forward(currentList)
            if not hasNext(currentList):   # wrap around to the start of the wheel
                reset(currentList)
            spinsLeft -= 1
        print(currentList.cursor.data)     # the arrow lands on this element
        spinCounter += 1
        if currentList.cursor.data == 'Switch':
            print('-----Switching Wheels-----')
            if currentList == list1:
                currentList = list2
            else:
                currentList = list1
        else:
            currentList.cursor.timesLanded += 1
            list1copy = copy.copy(list1)
            list2copy = copy.copy(list2)
            reset(list1copy)
            reset(list2copy)
            while hasNext(list1copy):
                if list1copy.cursor.timesLanded == 3:
                    winner = True
                forward(list1copy)         # advance the cursor so the scan terminates
            while hasNext(list2copy):
                if list2copy.cursor.timesLanded == 3:
                    winner = True
                forward(list2copy)
Example #7
def kepler(Manom, Eanom, eccn):

    itmax=100
    thres = 1.0e-6
    Eold = copy.copy(Eanom)
    Eanom = Manom + eccn * math.sin(Eanom)
    diff = abs(1.0 - Eanom / Eold)
    Eold = copy.copy(Eanom)
    i = 0
    while diff > thres and i < itmax:
        Eanom = Manom + eccn * math.sin(Eanom)
        diff = abs(1.0 - Eanom / Eold)
        Eold = copy.copy(Eanom)
        i += 1

    return Eanom
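A small convergence check (a sketch; assumes the kepler function above and its copy/math imports are in scope): the returned eccentric anomaly E should satisfy Kepler's equation E = M + e*sin(E) up to the 1e-6 stopping threshold.

import math

M = 1.047                # mean anomaly (rad), an arbitrary test value
e = 0.0167               # an Earth-like eccentricity
E = kepler(M, M, e)      # seed Eanom with the mean anomaly
print(abs(E - (M + e * math.sin(E))))  # residual, ~0 at convergence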
Example #8
def _check_and_fix_resolution(input_file: str, block_size: int,
                              output_options_original: dict) -> dict:
    """
    Returns a dictionary containing the output settings, taking into consideration if the video needs to be resized,
    and if it does, changes the pipe_video commands to include dar.
    """
    from dandere2x.dandere2xlib.utils.yaml_utils import load_executable_paths_yaml
    from dandere2x.dandere2xlib.wrappers.ffmpeg.ffmpeg import append_resize_filter_to_pre_process, \
        append_dar_filter_to_pipe_process
    from dandere2x.dandere2xlib.wrappers.ffmpeg.videosettings import VideoSettings
    import copy

    def valid_input_resolution(width: int, height: int, block_size: int):
        return width % block_size == 0 and height % block_size == 0

    new_output_options = copy.copy(output_options_original)

    # get meta-data from the video to do pre-processing
    ffprobe_path = load_executable_paths_yaml()['ffprobe']
    ffmpeg_path = load_executable_paths_yaml()['ffmpeg']
    video_settings = VideoSettings(ffprobe_path, ffmpeg_path, input_file)
    width, height = video_settings.width, video_settings.height

    if not valid_input_resolution(
            width=width, height=height, block_size=block_size):
        print("appending resize filter")
        append_resize_filter_to_pre_process(output_options=new_output_options,
                                            width=width,
                                            height=height,
                                            block_size=block_size)
        append_dar_filter_to_pipe_process(output_options=new_output_options,
                                          width=width,
                                          height=height)

    return new_output_options
Example #9
def test_tracking_step(lattice, parametr, update_ref_values=False):
    """Tracking step function test
    :parametr=0 - tracking with MethodTM() - params[Undulator] = UndulatorTestTM
    :parametr=1 - tracking with default MethodTM()
    """

    p = Particle(x=0.001, y=0.002)
    p.E = 2.5

    navi = Navigator(lattice)
    dz = 0.01

    P1 = []
    for iii in range(int(lattice[parametr].totalLen / dz)):
        tracking_step(lattice[parametr], [p], dz=dz, navi=navi)
        P1.append(copy.copy(p))

    tracking_step(lattice[parametr], p, dz=dz, navi=navi)

    P1 = obj2dict(P1)

    if update_ref_values:
        return P1

    p_ref = json_read(REF_RES_DIR + sys._getframe().f_code.co_name +
                      str(parametr) + '.json')

    #assert check_dict(P1, p_ref, TOL)
    result = check_dict(P1, p_ref, TOL, assert_info=' P1 - ')
    assert check_result(result)
Example #10
 def remove_all(self):
     self.make_single_edges()
     self.make_one_final()
     for num, node in enumerate(copy.copy(self.nodes)):
         if node not in self.final and node != self.start:
             self._remove_node(node)
             self.make_tex(num)
Example #11
def generateAllSentences(doof, currState, currString, parseString, stack):
    global all_CM_sentences
    # all_CM_sentences = set()
    possibleTransitions = [(k[1], v) for k, v in doof.transitions.items()
                           if k[0] == currState]
    possibleTransition_ops = [(k[1], doof.transition_ops[k])
                              for k, v in doof.transitions.items()
                              if k[0] == currState]

    if possibleTransitions != []:
        for i in range(len(possibleTransitions)):
            new_stack = copy.copy(stack)
            new_State = possibleTransitions[i][1]
            new_parseString = parseString
            for op in possibleTransition_ops[i][1]:
                if type(op) == str:
                    if op == "match":
                        new_parseString += " {}".format(
                            possibleTransitions[i][0][:-2])
                    else:  # op is pop
                        new_stack.pop()
                        new_parseString += ")"
                else:  #len is 2
                    new_stack.append("(")
                    new_parseString += " ({}".format(op[1])
            generateAllSentences(
                doof, new_State,
                currString + ' ' + possibleTransitions[i][0][:-2],
                new_parseString, new_stack)
    else:
        all_CM_sentences.add((currString.strip(), parseString))
        # print currString.strip()

    return
Example #12
def init_params(options, We_init):

    params = OrderedDict()
    # embedding
    params['Wemb'] = copy.copy(We_init)

    # Init the LSTM parameter:
    W = np.concatenate([
        ortho_weight(options.dim),
        ortho_weight(options.dim),
        ortho_weight(options.dim),
        ortho_weight(options.dim)
    ],
                       axis=1)
    params['lstm_W'] = W
    U = np.concatenate([
        ortho_weight(options.dim),
        ortho_weight(options.dim),
        ortho_weight(options.dim),
        ortho_weight(options.dim)
    ],
                       axis=1)
    params['lstm_U'] = U

    b = np.zeros((4 * options.dim))
    params['lstm_b'] = b.astype('float32')

    return params
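Note that ortho_weight is not shown in this snippet; in Theano-style LSTM code of this kind it is conventionally an orthogonal initializer, along the lines of the following sketch (an assumption about the missing helper, not part of the source):

import numpy as np

def ortho_weight(ndim):
    # the left singular vectors of a random square matrix form an orthogonal matrix
    W = np.random.randn(ndim, ndim)
    u, s, v = np.linalg.svd(W)
    return u.astype('float32')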
Example #13
 def _clear_edges(self):
     for node, edges in copy.deepcopy(self.edges).items():
         for letter, set_ in copy.copy(edges).items():
             if not set_:
                 del self.edges[node][letter]
         if not self.edges[node]:  # check the live dict, not the deepcopy snapshot
             del self.edges[node]
Example #14
 def __getstate__(self):
     dic = copy.copy(self.__dict__)
     try:
         del dic["_irc"], dic["_replyto"]
     except KeyError:
         pass
     return dic
Example #15
def combinations(target, data):
    for i in range(len(data)):
        new_target = copy.copy(target)
        new_target.append(data[i])
        new_data = data[i + 1:]
        print(new_target)
        combinations(new_target, new_data)
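For reference, a small trace with a hypothetical input; each level of recursion extends target with one later element of data, so every non-empty subsequence is printed exactly once:

combinations([], ['a', 'b', 'c'])
# prints: ['a'], ['a', 'b'], ['a', 'b', 'c'], ['a', 'c'], ['b'], ['b', 'c'], ['c']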
Example #16
 def save_user(self, data: dict):
     query = copy.copy(self._query)
     query.update(data)
     query['time'] = maya.now()._epoch
     query['type'] = self.entity
     query['timestamp'] = maya.now()._epoch
     self.save(query)
Example #17
    def add_parent_post(self, p, recurse=True):
        # print 'adding parent:', p, str(p), type(p), self, type(self), self.childrenNodes
        nm = self.sig() if type(self).__name__ == 'MethodDeclaration' else self.name
        tmp_symtab = self.symtab
        special_syms = {}
        for key,val in tmp_symtab.items():
            if str(key)[:8] == "#unboxer":
                special_syms[key] = val
        if nm not in tmp_symtab:
            self.symtab = {nm:self}
        else:
            self.symtab = {nm:self.symtab[nm]}

        if len(special_syms) > 0:
            self.symtab = dict(special_syms.items() + self.symtab.items())
        if nm and nm not in p.symtab: p.symtab.update({nm:self})
        if self not in p.childrenNodes: p.childrenNodes.append(self)
        self.parentNode = p
        if p.symtab and self.symtab:
            self.symtab = dict(p.symtab.items() + self.symtab.items())
        elif p.symtab:
            self.symtab = copy.copy(p.symtab)

        if recurse:
            for c in self.childrenNodes:
                if c: c.add_parent_post(self, True)
Example #18
    def add_parent_post(self, p, recurse=True):
        # print 'adding parent:', p, str(p), type(p), self, type(self), self.childrenNodes
        nm = self.sig() if type(
            self).__name__ == 'MethodDeclaration' else self.name
        tmp_symtab = self.symtab
        special_syms = {}
        for key, val in tmp_symtab.items():
            if str(key)[:8] == "#unboxer":
                special_syms[key] = val
        if nm not in tmp_symtab:
            self.symtab = {nm: self}
        else:
            self.symtab = {nm: self.symtab[nm]}

        if len(special_syms) > 0:
            self.symtab = dict(special_syms.items() + self.symtab.items())
        if nm and nm not in p.symtab: p.symtab.update({nm: self})
        if self not in p.childrenNodes: p.childrenNodes.append(self)
        self.parentNode = p
        if p.symtab and self.symtab:
            self.symtab = dict(p.symtab.items() + self.symtab.items())
        elif p.symtab:
            self.symtab = copy.copy(p.symtab)

        if recurse:
            for c in self.childrenNodes:
                if c: c.add_parent_post(self, True)
Example #19
 def __init__(self, predicates=[], response_transformers=[],
              request_transformers=[]):
     import copy
     self.predicates = copy.copy(predicates)
     self.response_transformers = copy.copy(response_transformers)
     self.request_transformers = copy.copy(request_transformers)
     for method in  ["GET", "PUT", "DELETE", "POST", "HEAD"]:
         for cm in self.__dict__.keys():
             if cm.find("_http") == 0:
                 new_method = cm.replace("_http", method, 1)
                 if not new_method in self.__dict__:
                     self.__dict__[new_method] = partial(
                         self.__class__._dispatch,
                         self,
                         method=method,
                         desc=self.__dict__[cm])
Example #20
def test_copy_2():
    import copy

    ensure_tainted(
        copy.copy(TAINTED_LIST),
        copy.deepcopy(TAINTED_LIST),
    )
Example #21
def size_to_rel_size_and_len(size, input_len):
    if size >= 1:
        len_ = copy.copy(size)
        size = size / input_len
    else:
        len_ = round(input_len * size)
    return size, len_
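A worked pair of calls (hypothetical numbers): whichever representation comes in, the fraction and the absolute length both come out.

print(size_to_rel_size_and_len(50, 200))    # (0.25, 50): absolute count in
print(size_to_rel_size_and_len(0.25, 200))  # (0.25, 50): fraction in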
Example #22
    def lookup_attack(self, atk_name: str) -> Dict[str, Any]:
        '''
        Return the atk dict given by static.lookup_attack, but with fixed
        synergy (e.g. from KOs or switches), and handle a few other things
        like Shuine's Horn.
        '''
        from copy import copy
        from .gear import ShuinesHorn

        attack = lookup_attack(atk_name)
        if 'synergy type' in attack:
            syn_type = attack['synergy type']
            if self.ally and syn_type in self.ally.types:
                attack = lookup_attack(f'{attack["name"]} +{syn_type.detail}')
        elif 'synergy attack' in attack:
            synergy_type = attack['name'].split(' +')[1]
            if ((not self.ally) or Types[synergy_type] not in self.ally.types):
                attack = lookup_attack(attack['name'].split(' +')[0])

        if (attack['type'] == Types.toxic and self.gear is ShuinesHorn
                and not self.seized):
            attack = copy(attack)
            # Don't need copy.deepcopy, as we only change a top-level
            # property (type)
            attack['type'] = Types.water

        return attack
Example #23
    def __init__(self,
                 value=45,
                 std=4,
                 vcw=3,
                 p=np.array([[0, 0, 0], [10, 10, 10]])):
        """



        Attributes
        ----------

        tdoa = (|F1M| - |F2M|)/0.3 = (|p1M| - |p2M|)/0.3    (ns)


        value : Time difference of Arrival  (ns)    30 ns  = 9 m
        std   : Standard deviation on tdoa  (ns)    1 ns   = 30cm
        vcw   : constraint width factor


        Methods
        -------

        vrai(deltar)  : check constraint validity

        """
        Constraint.__init__(self, 'TDOA', p)

        #
        # (vn,wn,tn) orthonormal triad
        #
        #       self.Dmax = 25 # limit of tdoa box in meter
        ##
        self.tdoa_axes(p)

        self.f = self.nv / 2
        self.Dmax = self.nv
        self.value = min(value, 2 * self.f / 0.3)
        self.value = max(self.value, -2 * self.f / 0.3)
        self.std = std
        self.vcw = vcw
        self.drange = self.value * 0.3
        self.sstd = self.std * 0.3
        self.tdoa_box(vcw)
        #               if self.ndim == 3:
        #                       BOUND1 = np.array([0.0,0.0,-2.0])
        #                       BOUND2 = np.array([20.0,20.0,2.0])
        #                       box         = BoxN(np.vstack((BOUND1,BOUND2)),ndim=np.shape(self.p)[1])
        #               else:
        #                       BOUND1 = np.array([0.0,0.0])
        #                       BOUND2 = np.array([20.0,20.0])
        #                       box         = BoxN(np.vstack((BOUND1,BOUND2)),ndim=np.shape(self.p)[1])

        #               self.lbox    = LBoxN([box],ndim=np.shape(self.p)[1])

        self.annulus_bound()
        self.Id = copy.copy(self.C_Id)
        Constraint.C_Id = Constraint.C_Id + \
            1   # constraint counter is incremented
Example #24
def test_affine_copy():
    incs, outcs, aff = affine_v2w()
    cm = AffineTransform(incs, outcs, aff)
    import copy
    cmcp = copy.copy(cm)
    assert_array_equal(cmcp.affine, cm.affine)
    assert_equal(cmcp.function_domain, cm.function_domain)
    assert_equal(cmcp.function_range, cm.function_range)
Example #25
	def get_matrix(self):
		"""Retorna una copia 'superficial' (por valor) de la matriz"""
		dim = self.get_dim()
		matrix = []
		for i in range(dim):
			temp = copy.copy(self.__matrix[i])
			matrix.append(temp)
		return matrix
Example #27
def get_url_from_url_mask(pUrlMask, pInstrument, pPeriod, pStartTime, pEndTime):
    # Returns a Url containing no {} characters.
    returnValue = copy.copy(pUrlMask)
    returnValue = returnValue.replace('{instrument}', pInstrument)
    returnValue = returnValue.replace('{start}', pStartTime)
    returnValue = returnValue.replace('{end}', pEndTime)
    returnValue = returnValue.replace('{period}', pPeriod)
    return returnValue
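A hypothetical call (the mask URL is invented for illustration):

url = get_url_from_url_mask(
    'https://example.com/history?i={instrument}&p={period}&from={start}&to={end}',
    'EURUSD', 'M1', '2020-01-01', '2020-01-02')
# -> 'https://example.com/history?i=EURUSD&p=M1&from=2020-01-01&to=2020-01-02'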
Example #28
def test_comap_copy():
    import copy
    incs, outcs, map, inv = voxel_to_world()
    cm = CoordinateMap(incs, outcs, inv, map)
    cmcp = copy.copy(cm)
    yield assert_equal, cmcp.function, cm.function
    yield assert_equal, cmcp.function_domain, cm.function_domain
    yield assert_equal, cmcp.function_range, cm.function_range
    yield assert_equal, cmcp.inverse_function, cm.inverse_function
Example #29
 def __init__(self,
              predicates=[],
              response_transformers=[],
              request_transformers=[]):
     import copy
     self.predicates = copy.copy(predicates)
     self.response_transformers = copy.copy(response_transformers)
     self.request_transformers = copy.copy(request_transformers)
     for method in ["GET", "PUT", "DELETE", "POST", "HEAD"]:
         for cm in self.__dict__.keys():
             if cm.find("_http") == 0:
                 new_method = cm.replace("_http", method, 1)
                 if not new_method in self.__dict__:
                     self.__dict__[new_method] = partial(
                         self.__class__._dispatch,
                         self,
                         method=method,
                         desc=self.__dict__[cm])
Example #31
def draw_board(tiles):
    tiles = copy.copy(tiles)
    for tile in tiles:
        for loc in range(len(tile)):
            tile[loc] = str(tile[loc]).replace("0",
                                               " ").replace("2", "B").replace(
                                                   "3", "_").replace("4", "O")

    print("\n".join("".join(str(i) for i in tile) for tile in tiles))
Example #32
    def bfs_adapt(self,
                  reward_window,
                  progress_window,
                  cost_window,
                  prop_window,
                  distance_window,
                  update_trace_panel,
                  timer=30,
                  lock=None):

        plot_data = {
            "rewards": [],
            "progress": [],
            "cost": [],
            "props": [],
            "distances": [],
            "time": []
        }

        #for i in range(2):
        #print("Day {}".format(i))

        self.log.open()
        for i in range(0, 1):
            bfs = BFSAdapt(self.TS, self.micro_selection,
                           self.consolidated_trajs, self.inputs, self.outputs,
                           self.freqs, self.mod_perc, self.path_to_interaction,
                           update_trace_panel, self.log,
                           self.combined_raw_trajs)
            self.TS, st_reachables, correctness_trajs = bfs.adapt(timer, lock)

            tb = TraceGenerator(self.TS, self.inputs.alphabet)
            trajs = tb.get_trajectories(150)
            for traj in trajs:
                self.trajs.append(traj)
            for traj in trajs:
                self.raw_trajs.append(traj.copy())

            combined_raw_traj_dict = {}
            self.combined_raw_trajs = []
            self.ignore_duplicate_trajectories(self.raw_trajs,
                                               combined_raw_traj_dict,
                                               self.combined_raw_trajs)

            self.original_rewards = self.offset_rewards(self.baseline)
            self.consolidate_trajectories()
            self.generate_prefixes(self.consolidated_trajs)

            original_interaction_trajs = copy.copy(self.trajs)

        self.log.close()
        self.json_exp.export_from_object(self.TS, st_reachables, self.freqs)

        # export the interaction
        exporter = TSExporter(self.TS, self.json_data)
        exporter.export(self.result_file_dir)
Example #33
    def __init__(self, value=45, std=4, vcw=3, p=np.array([[0, 0, 0], [10, 10, 10]])):
        """



        Attributes
        ----------

        tdoa = (|F1M| - |F2M|)/0.3 = (|p1M| - |p2M|)/0.3    (ns)


        value : Time difference of Arrival  (ns)    30 ns  = 9 m
        std   : Standard deviation on tdoa  (ns)    1 ns   = 30cm
        vcw   : constraint width factor


        Methods
        -------

        vrai(deltar)  : check constraint validity

        """
        Constraint.__init__(self, 'TDOA', p)

        #
        # (vn,wn,tn) orthonormal triad
        #
    #       self.Dmax = 25 # limit of tdoa box in meter
        ##
        self.tdoa_axes(p)

        self.f = self.nv / 2
        self.Dmax = self.nv
        self.value = min(value, 2 * self.f / 0.3)
        self.value = max(self.value, -2 * self.f / 0.3)
        self.std = std
        self.vcw = vcw
        self.drange = self.value * 0.3
        self.sstd = self.std * 0.3
        self.tdoa_box(vcw)
#               if self.ndim == 3:
#                       BOUND1 = np.array([0.0,0.0,-2.0])
#                       BOUND2 = np.array([20.0,20.0,2.0])
#                       box         = BoxN(np.vstack((BOUND1,BOUND2)),ndim=np.shape(self.p)[1])
#               else:
#                       BOUND1 = np.array([0.0,0.0])
#                       BOUND2 = np.array([20.0,20.0])
#                       box         = BoxN(np.vstack((BOUND1,BOUND2)),ndim=np.shape(self.p)[1])

#               self.lbox    = LBoxN([box],ndim=np.shape(self.p)[1])

        self.annulus_bound()
        self.Id = copy.copy(self.C_Id)
        Constraint.C_Id = Constraint.C_Id + \
            1   # constraint counter is incremented
Example #34
    def function(self, function_type, **kwargs):
        """Factory for repo function objects.

        Args: self, function_type
        Returns: product.RepoBasisFunction
        """
        def _make_function():
            basis_function = RepoBasisFunction(category='SubfunctionType',
                                               subtype=function_type,
                                               entry=basis_entry,
                                               **kwargs)

            return basis_function

        try:
            self.counters['SubfunctionType'][function_type] += 1
        except KeyError:
#            print self.counters
            self.counters['SubfunctionType'][function_type] = 1

        basis_entry = self._basis['SubfunctionType'][function_type]
        if self.common:
            try:
                basis_function = self._basis['SubfunctionType'][function_type]['object']
            except KeyError as ke:
                basis_function = _make_function()
                self._basis['SubfunctionType'][function_type]['object'] = basis_function
            if 'etree' in kwargs:
                basis_function.attrs_from_etree(etree=kwargs['etree'])
        else:
            basis_function = _make_function()

        #store instance
        try:
            self.instances['SubfunctionType'][function_type].append(
                    copy.copy(basis_function))
        except KeyError:
            self.instances['SubfunctionType'][function_type] = [
                    copy.copy(basis_function)]

        return basis_function
Example #35
def _subfiles(d):
    r = []
    stack = [([], d)]
    while stack:
        p, n = stack.pop()
        if isdir(n):
            for s in os.listdir(n):
                if s[:1] != '.':
                    stack.append((copy.copy(p) + [s], join(n, s)))
        else:
            r.append((p, n))
    return r
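For orientation, a sketch of the output shape (isdir and join come from os.path in the source module):

# Given a hypothetical tree  docs/a.txt, docs/sub/b.txt :
# _subfiles('docs') yields (['a.txt'], 'docs/a.txt') and (['sub', 'b.txt'], 'docs/sub/b.txt')
# (pair order depends on os.listdir and the LIFO stack).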
Example #37
    def changesettings(self, case_id, *args, **kw):
        import copy

        newsettings = copy.copy(self.settings)
        for name, val in kw.iteritems():
            newsettings.set(name, val)
        storepath = self.db.get_case_storepath(case_id)
        cfgpath = os.path.join(storepath, Includes.CASE_SETTINGSFILENAME)
        newsettings.set_storefile(cfgpath)
        newsettings.save()
        self.settings = newsettings
        return json.dumps({"success": 1})
Example #38
 def changesettings(self, case_id, *args, **kw ):
     import copy
     
     newsettings = copy.copy(self.settings)
     for name,val in kw.iteritems():
         newsettings.set( name, val )
     storepath = self.db.get_case_storepath(case_id)
     cfgpath = os.path.join( storepath, Includes.CASE_SETTINGSFILENAME )
     newsettings.set_storefile(cfgpath)
     newsettings.save()
     self.settings = newsettings
     return json.dumps({"success":1})
Example #39
def new_words(num, forbidden):
    counter = 0
    i = 0
    word = 's0'
    answer = []
    while counter < num:
        if word not in forbidden:
            answer.append(copy.copy(word))
            counter += 1
        i += 1
        word = "s{}".format(i)

    return answer
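A worked call with a hypothetical forbidden set; the first num names of the form s<i> that are not forbidden come back in order:

print(new_words(3, {'s0', 's2'}))  # -> ['s1', 's3', 's4']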
Example #40
    def __init__(self, model, snippet_fn=None, utility_fn=None, seed=1):
        super(StructuredLearner, self).__init__(model, seed=seed)
        import copy

        self.snippet_model = copy.copy(model)
        self.utility = utility_fn
        self.snippet_utility = snippet_fn
        self.sent_tokenizer = None
        self.vct = None
        self.calibrate = None
        self.sent_rnd = np.random.RandomState(self.seed)
        self.cost_model = None
        self.cost_fn = None
Example #41
    def __init__(self, **kwargs):

        # Any kwarg will end in the meta of this instance.
        self._meta = copy.copy(self._meta)        
        self._meta.update(kwargs)

        # Create the collections.
        for collection_class in self._meta.collections:

            # Create instances of each collection.
            collection = collection_class(interface=self)

            # Store the collection in the api to avoid dynamic lookups.
            setattr(self, collection._meta.name, collection)
Example #42
	def __init__(self,value=30,std=1.0,vcw=3,p=np.array([])):
		Constraint.__init__(self,'TOA',p)

		self.value  = value 
		self.std    = std
		self.range  = value*0.3
		self.sstd   = self.std*0.3
		self.rescale(vcw)
		self.runable = True
		self.evaluated = False
		self.annulus_bound()
		self.Id    =copy.copy(self.C_Id)
		#self.mass  = 
		Constraint.C_Id = Constraint.C_Id+1   # constraint counter is incremented
Example #43
    def produce(self,
                parents: Sequence[Genome],
                spawner: GeneSpawner = None) -> Genome:
        """Produce a child Genome from parent Genomes and optional GenomeSpawner.

        Parameters
        ----------
        parents
            A list of parent Genomes given to the operator.
        spawner
            A GeneSpawner that can be used to produce new genes (aka Atoms).

        """
        return copy.copy(parents[0])
Example #44
    def __copy__(self, value=None, temp=None):
        """Copy this GH; shallow copies of value & proposal so we don't have sampling issues."""

        c = type(self)(self.grammar, self.hypotheses)  # don't copy the cached matrices
        c.__dict__.update(self.__dict__)

        if value is None:
            value = copy.copy(self.value)
        c.set_value(value)

        if temp is not None:
            c.likelihood_temperature = temp

        return c
Example #45
 def __getstate__(self):
     import copy
     try:
         state = copy.copy(self.__dict__)
         for x in self.nopickle:
             if x in state:
                 del(state[x])
             else:
                 desc,cls = type(self).get_param_descriptor(x)
                 if desc and (desc.get_name(self) in state):
                     del(state[desc.get_name(self)])
             
     except AttributeError, err:
         pass
     return state
Example #46
	def getTopGroups(self):
		# find topGroups
		topGroups = [grp for grp in ls(type='transform') if not grp.getParent() and not grp.getShape()]
	
		# remove cameras
		newTopGroups = []
		if self.mayaVer >= 2011:
			newTopGroups = copy.copy(topGroups)
		else: newTopGroups = copy(topGroups)
		for cam in topGroups:
			# check for dupes
			for child in cam.getChildren():
				self.check4Dup(child, 1, '. Suggest deleting history')
			if len(cam.getChildren()) == 1 and objectType(cam.getChildren()) == 'camera': newTopGroups.remove(cam)
		return newTopGroups
Example #47
 def generateRandomLinearBallot(self,seed):
     """
     Renders a randomly generated linear ballot.
     """
     import random,copy
     if seed == None:
         random.seed()
     else:
         random.seed(seed)
     linearBallot = {}
     voters = self.voters
     candidateList = [x for x in self.candidates]
     #print candidateList
     for v in voters:
         random.shuffle(candidateList)
         #print candidateList
         linearBallot[v] = copy.copy(candidateList)
     return linearBallot
Example #48
def init_params(options, We_init):

    params = OrderedDict()
    # embedding
    params['Wemb'] = copy.copy(We_init)

    # Init the LSTM parameter:
    W = np.concatenate([ortho_weight(options.dim),
                        ortho_weight(options.dim),
                        ortho_weight(options.dim),
                        ortho_weight(options.dim)], axis=1)
    params['lstm_W'] = W
    U = np.concatenate([ortho_weight(options.dim),
                        ortho_weight(options.dim),
                        ortho_weight(options.dim),
                        ortho_weight(options.dim)], axis=1)
    params['lstm_U'] = U

    b = np.zeros((4 * options.dim))
    params['lstm_b'] = b.astype('float32')

    return params
Example #49
	def __init__(self,value=40,std=1.0,vcw=3,model={},p=np.array([])):	
		Constraint.__init__(self,'RSS',p)

		self.Id     = copy.copy(self.C_Id)
		Constraint.C_Id = Constraint.C_Id+1   # constraint counter is incremented
		self.value  = value  # attenuation (dB)
		if len(model)==0:
			self.model['PL0'] =-34.7
			self.model['d0']  = 1.0 
			self.model['RSSnp'] = 2.64
			self.model['RSSStd'] = 4.34 
			self.model['Rest'] = 'mode'
		else:
			self.model  = model

		self.LOC = RSSLocation(p)
		self.std  = (self.LOC.getRangeStd(p, self.model['PL0'], self.model['d0'], self.value,self.model['RSSnp'], self.model['RSSStd'], self.model['Rest'])[0])/0.3
		self.range    = self.LOC.getRange(p, self.model['PL0'], self.model['d0'], self.value, self.model['RSSnp'], self.model['RSSStd'], self.model['Rest'])[0]
		self.sstd   = self.std*0.3
		self.rescale(vcw)
		self.runable = True
		self.evaluated = False
		self.annulus_bound()
Example #50
def load_text(filenames, sep=" ", random_split=True, permute_wordforms=False, top=None, echo_to_tmp=True, remove_tags=False, small=False, vocabulary=None):
	"""
		This loads the filenames by regex-matching words, and then *recombining* them and splitting by the split character.
		- sep - what should separate "words" (not necessarily a space, in some of our simulations)
		- random_split - should ranks and freqs be estimated separately? (via a binomial split)
		- permute_wordforms - if True, we shuffle up all the word forms (in-place in the corpus) before anything
		- top - only count the top this many words
		- echo_to_tmp - if True, we write to /tmp/load_text.txt the corpus as it looks (just for spot checking)
		- remove_tags - if True, we search and replace things in < .. >, as in BNC
		- small - if true, we are debugging and we only use the first few

		Returns a list of freq, rank, word
	"""
	
	filenames = [f for f in filenames]
	if small: filenames = filenames[:50]
    
	# Load the actual corpus files
	corpus = ""
	for f in list(filenames):
		print "# Processing ", f
		for l in open(f, 'r'):
			l = l.strip().lower() # clean up
			if remove_tags: l = re.sub(r'<.*>', " ", l)
			
			corpus = corpus + " " + " ".join([x for x in re.findall("([a-z]+)", l)])
			
	corpus = re.sub("\\s+", " ", corpus) # clean up too many spaces
	
	# Restrict the vocabulary if we want
	if vocabulary is not None:
		corpus = " ".join(filter(lambda x: x in vocabulary, re.split(" ", corpus)))
	
	# If we permute wordforms, it means we shuffle around words and re-construct the corpus *before* we 
	# split things up. This lets us know if the real relationship between word length/frequency is what matters
	# in finding similar statistics
	if permute_wordforms:
		
		# construct a mapping
		word_types = list( set( re.split(" ", corpus) ) ) # using " " as a separator
		shuffled = copy.copy(word_types)	
		random.shuffle(shuffled) # shuffle this up
		
		fromto = dict() # build a mapping from -> to via the shuffling
		for f,t in zip(word_types, shuffled): fromto[f] = t
		
		# Now translate the corpus
		corpus = " ".join([ fromto[x] for x in re.split(" ", corpus) ])
		#print corpus
	
	# If we should dump a copy for perusal
	if echo_to_tmp:
		o = open("/tmp/load_text.txt", 'w')
		print >>o, corpus
		o.close()
		
	
	# Now we have the corpus, we may split it by whatever character we like and count the frequencies
	freq = defaultdict(int)
	for w in re.split(sep, corpus): 
		freq[w] += 1
	
	return freqdict2wordsfreqsranks(freq, random_split=random_split, top=top)
Example #51
 def __call__(self, prefix = None, wrapper = None):
     djp = copy.copy(self)
     djp.prefix  = prefix
     djp.wrapper = wrapper
     return djp
Example #52
	def get_dim(self):
		"""Retorna una copia 'superficial' (por valor) de la dimension"""
		dim = copy.copy(self.__dimMatrix)
		return dim
Example #53
def transitModel(anorm,M1,M2,R1,R2,period,inclination,bjd0,eccn,omega,depth,albedo,
                 c1,c2,c3,c4,gamma,contamination,npt,time,exptime,dtype,eclipses,
                 dopboost,ellipsoidal):

# takes account of:
# 1. non-linear limb darkening
# 2. doppler boosting
# 3. ellipsoidal variations
#
# INPUT ARGUMENTS
#            M1: stellar mass (Msun)
#            M2: mass of planet/companion (Msun)
#            R1: stellar radius (Rsun)
#            R2: radius of planet/companion (Rsun)
#        period: period of orbit (days)
#   inclination: orbital inclination (deg; 90 = edge-on)
#          bjd0: center of transit time (Epoch, BJD-2454900)
#         ecosw: e = eccentricity w = orbit angle of periastron
#         esinw: e = eccentricity w = orbit angle of periastron
#         depth: occultation depth (unitless)
#             c: non-linear limb-darkening (4 unitless coefficients)
#         gamma: gamma velocity (m/s)
# contamination: fractional contamination from background sources
#           npt: the number of points for the model to return
#          time: time (center) of each model point (days)
# exptime[npt]: the integration time for each model point (days)
#         dtype: 0 = photometry, 1 = RV
#
# OUTPUT VARIABLES
#        tmodel: the model for each time point
#
# OTHER PARAMETERS
#        nintg: used to find the average flux over the integration time
#            K: amplitude of RV
#         voff: radial velocity offset (m/s)
#       vmodel: model velocities (m/s)
#         vrot: rotational velocity (m/s)
#        Eanom: Eccentric anomaly
#        Manom: Mean anomaly over nintg subsampling of exposure times
# Tanom[nintg]: True anomaly
#       kepler: solves Kepler equations
#  trueanomaly: calculates the true anomaly
#  arad[nintg]: distance between star and planet
#         eccn: eccentricity
#          ted: parameter for thermal eclipse (mmag)
#         Psec: period of orbit (sec)
#          Per: period of orbit (days)
#        asemi: semi-major axis (m)
#         incl: orbital inclination (radians)
#   phi[nintg]: orbital phases of exposure sub-sampling (radians)
#     t[nintg]: times of exposure sub-sampling (days)
#    x2[nintg]: position of planet/companion during exposure sub-sampling (days)
#    y2[nintg]: position of planet/companion during exposure sub-sampling (days)

# PHYSICAL CONSTANTS
#           G: gravitational constant m3 kg-1 s-2
#          Cs: speed of light (m/s)
#        Msun: solar mass (kg)
#        Rsun: solar radius (m)
#         fDB: Doppler boosting factor
#          fT: ellipsoidal mass factor
#         tpi: 2 x pi
#        Pid2: pi / 2
#        c[4]: four limb darkening coefficients

# startup parameters

    nintg = 11
    vrot = 7.0e4
    G = 6.67384e-11
    Cs = 2.99792458e8
    Msun = 1.98892e30
    Rsun = 6.955e8
    fDB = 1.896
    fT = 3.37
    tPi = 2.0 * math.pi
    Pid2 = math.pi / 2.0
    
# fractional contamination from background sources

    dilute = copy.copy(contamination)

# body masses

    M1 = M1 * Msun
    M2 = M2 * Msun

# orbital period

    Per = copy.copy(period)
    Psec = copy.copy(period) * 8.64e4

# semi-major axis

    asemi = (Psec * Psec * G * (M1 + M2) / (4.0 * math.pi * math.pi))**(1.0 / 3.0)
   
# body radii

    R1 = abs(R1 * Rsun)
    R2 = abs(R2 * Rsun)

# impact parameter

    bmin = asemi / R1 * math.cos(inclination * math.pi / 180.0)

# eccentric orbit parameters
    
    ecosw = eccn * math.cos(math.pi * omega / 180)
    esinw = eccn * math.sin(math.pi * omega / 180)

# orbital eccentricity and omega

    eccn = math.sqrt(ecosw * ecosw + esinw * esinw)
    if eccn > 1.0: 
        eccn = 0.99
    if eccn == 0.0:
        eccn = 1.0e-10
        w = 1.0e-10
    else:
        w = math.atan(esinw / ecosw)
        if ecosw > 0.0 and  esinw < 0.0:
            w = tPi + w
        elif ecosw < 0.0 and esinw >= 0.0:
            w = math.pi + w
        elif ecosw < 0.0 and esinw < 0.0:
            w = math.pi + w
        if w == 0.0:
            w = 1.0e-10

# starting guess for eccentric anomaly and mean anomaly

           
    Eanom = copy.copy(w)
    Manom = copy.copy(w)

# parameter for thermal eclipse

    ted = depth * 1.0e-6

# non-linear limb darkening

    c = numpy.array([c1,c2,c3,c4],dtype='float32')

# inclination angle

    inclmin = 180.0 * math.tan((R1 + R2) / asemi) / math.pi
    inclmin=90.0-inclmin
    incl = copy.copy(inclination)
    if inclmin >= 0.0 and inclmin <= 90.0:
        if incl > 90.0:
            incl = 180.0 - incl
    incl = math.pi * (90.0 - incl) / 180.0

# observer-star-planet angle. Find phase at centre of transit

    epoch = copy.copy(bjd0)
    Eanom = kepler(Manom,Eanom,eccn)
    phi0 = trueanomaly(eccn,Eanom)

# RV initialization
 
    K = 2.0 * math.pi * G * M2**3 * (math.sin(incl + Pid2))**3 / \
        (Psec * (1.0 - eccn * eccn)**(3.0 / 2.0) * (M1 + M2) * (M1 + M2))
    K = K**(1.0 / 3.0)
    voff = copy.copy(gamma)

# normalization constant for light curve

    norm = math.pi

# initialize center of primary star to 0,0 coordinates

    x1 = 0.0
    y1 = 0.0

# subsampling of individual data points

    dnintg = float(nintg)
    dnintgm1 = 2.0 * dnintg - 2.0

# integration width initialization

    xintold = 0.0

# initialization of projected planet star distance

    y2pold = 0.0

# calculate model for each timestamp

    t = numpy.zeros((nintg),dtype='float64')
    phi = numpy.zeros((nintg),dtype='float64')
    x2 = numpy.zeros((nintg),dtype='float64')
    y2 = numpy.zeros((nintg),dtype='float64')
    Tanom = numpy.zeros((nintg),dtype='float64')
    arad = numpy.zeros((nintg),dtype='float64')
    tmodel = numpy.zeros((npt),dtype='float64')
    for i in range(npt):

# array of sub-sampled times spanning one exposure. times are centered on time[i]

        for j in range(nintg):
            t[j] = time[i] + exptime[i] * (2.0 * float(j) - dnintg - 1.0) / dnintgm1 - epoch
            phi[j]= t[j] / Per - math.floor(t[j] / Per)
            phi[j] = phi[j] * tPi
            Manom = phi[j] + w
            if Manom > tPi:
                Manom = Manom - tPi
            if Manom < 0.0:
                Manom = Manom + tPi
            Eanom = kepler(Manom,Eanom,eccn)
            Tanom[j] = trueanomaly(eccn,Eanom)
            if phi[j] > math.pi:
                phi[j] = phi[j] - tPi
            arad[j] = distance(asemi,eccn,Tanom[j])
            x2[j] = arad[j] * math.sin(Tanom[j] - phi0)
            y2[j] = arad[j] * math.cos(Tanom[j] - phi0) * math.sin(incl)

# photometric data case

        if dtype[i] == 0:

# initialize stellar surface area

            zarea = 0.0
            tflux = 0.0
            for j in range(nintg):

# doppler boosting
             
                if dopboost:
                    Kc = -K * (math.cos(Pid2 + Tanom[j] - phi0) + eccn * math.cos(w))
                    tflux = tflux + fDB * Kc / Cs

# ellipsoidal variations

                if ellipsoidal:
                    tflux = tflux + tides(M1,M2,R1,asemi,incl,Tanom[j],eccn,phi0)

# flux from star + planet/companion

                Ag = albedo * R1 * R1 / (arad[j] * arad[j])
                zarea = zarea + albedomod(t[j],Per,Ag,R1,R2,Tanom[j]-phi0)

# rescale flux and surface area 

            tflux = tflux / dnintg
            zarea = zarea / dnintg

# orbital phase (unitless) and flux change from planet transiting the star

            phase = Tanom[nintg // 2 + 1]
            if phase > math.pi:
                phase = phase - tPi
            if phase < -math.pi:
                phase = phase + tPi
            if abs(phase) < Pid2:
                (managol,b0,mulimb0,mulimbf,dist) = mandelagol(nintg,R1,R2,x1,x2,y1,y2,c)
                darea = (math.pi * managol + zarea + tflux) / norm
                vrotf = 0.0
            else:
                darea = 0.0
                for j in range(nintg):
                    darea = darea + eclmod2(R1,R2,x1,x2[j],y1,y2[j],zarea,norm,ted)
                    if j == 1:
                        xintold2 = copy.copy(xintold)
                        y2pold2 = copy.copy(y2pold)

# average area and add on delta term

                darea = darea / dnintg + tflux / norm
                xintold = copy.copy(xintold2)
                y2pold = copy.copy(y2pold2)

# Convert relative fluxes to magnitude to match observations

            tmodel[i] = darea * 1.0 + (1.0 - darea) * dilute

# RV data case

        elif dtype[i] == 1:

            tmodel[i] = 0.0
            for j in range(nintg):
                tmodel[i] = tmodel[i] + K * (math.cos(Pid2 + Tanom[j] - phi0) + eccn * math.cos(w))
            tmodel[i] = tmodel[i] / dnintg + voff

    return tmodel * anorm
Example #54
  def preprocess(self, data):
    """ reads the shifts from a spreadsheet and updates the interruptions of the corresponding node objects
    """
    strptime = datetime.datetime.strptime
    # read the current date and define dateFormat from it
    try:
      now = strptime(data['general']['currentDate'], '%Y/%m/%d %H:%M')
      # calculate the hours to end the first day
      hoursToEndFirstDay = datetime.datetime.combine(now.date(), datetime.time(23,59,59)) - datetime.datetime.combine(now.date(), now.time())
      data['general']['dateFormat']='%Y/%m/%d %H:%M'
    except ValueError:
      now = strptime(data['general']['currentDate'], '%Y/%m/%d')
      hoursToEndFirstDay = datetime.time(23,59,59)
      data['general']['dateFormat']='%Y/%m/%d'
    self.initializeTimeSupport(data)
    
    shiftData = data["input"].get("shift_spreadsheet",[])
    nodes = data["graph"]["node"]

    defaultShiftPattern = {} #default shift pattern dictionary (if no pattern is defined for certain dates)
    exceptionShiftPattern = {} # exceptions for shift pattern dictionary as defined in the spreadsheet
    if shiftData:
      #shiftData.pop(0)
      #iteration through the raw data to structure it into ManPy config
      for line in shiftData:
        # if all the records of that line are none then continue
        toContinue = False
        for record in line:
          if record != None and record!='':
            toContinue = True
            break
        if not toContinue:
          continue
        # list to hold the working intervals start times
        timeStartList = []
        # list to hold the working intervals end times
        timeEndList = []
        #if no shift start was given, assume 00:00
        startTime = line[2]
        if startTime == '' or startTime == None:
          startTime = "00:00"
        shiftStart = self.convertToSimulationTime(strptime("%s %s" % (line[1], startTime), '%Y/%m/%d %H:%M'))
        #if no shift end was given, assume 23:59
        endTime = line[3]
        if endTime == '' or endTime == None:
          endTime = "23:59"
        shiftEnd = self.convertToSimulationTime(strptime("%s %s" % (line[1], endTime), '%Y/%m/%d %H:%M'))
        timePair = self.correctTimePair(shiftStart, shiftEnd)
        if not timePair:
          continue
        else:
          shiftStart, shiftEnd = timePair
          timeStartList.append(shiftStart)
          timeEndList.append(shiftEnd)

        if line[-1]:
          offshifts = line[-1].replace(" ", "").split(";")
          for offshift in offshifts:
            limits = offshift.split("-")
            breakStart = self.convertToSimulationTime(strptime("%s %s" % (line[1], limits[0]), '%Y/%m/%d %H:%M'))
            breakEnd = self.convertToSimulationTime(strptime("%s %s" % (line[1], limits[1]), '%Y/%m/%d %H:%M'))
            timePair = self.correctTimePair(breakStart, breakEnd)
            if not timePair:
              continue
            else:
              breakStart, breakEnd = timePair
              timeStartList.append(breakEnd)
              timeEndList.insert(0, breakStart)
        # sort the list before proceeding
        timeEndList.sort()
        timeStartList.sort()
        #if the current row is a continuation row for a resource and no resource name is entered
        if line[0]:
          entityID = line[0].split("-")[0]
        else:
          entityID = ""
        if str(entityID) == '': 
          #take it as a continuation for the last entered resource
          for index, start in enumerate(timeStartList):
            end = timeEndList[index]
            if not start and not end:
              continue
            exceptionShiftPattern[lastrec].append([start, end])
        #if resource name is defined
        elif str(entityID) not in exceptionShiftPattern:
          #take the name of the last entered resource from here
          lastrec = str(entityID)
          exceptionShiftPattern[lastrec] = []
          for index, start in enumerate(timeStartList):
            end = timeEndList[index]
            if not start and not end:
              continue
            # if there is no other entry
            if not len(exceptionShiftPattern[lastrec]):
              exceptionShiftPattern[lastrec] = [[start, end]]
            else:
              exceptionShiftPattern[lastrec].append([start, end])
        #to avoid overwriting existing records, if there is another entry for a resource but does not follow it immediately (e.g. W2-FS)
        else:
          lastrec = str(entityID)
          #extend the previous entry for the resource
          for index, start in enumerate(timeStartList):
            end = timeEndList[index]
            if not start and not end:
              continue
            exceptionShiftPattern[lastrec].append([start, end])

      #sorts the list in case the records were not entered in correct ascending order
      for info in exceptionShiftPattern:
        exceptionShiftPattern[info].sort(key=itemgetter(0))
      # ================================================================
      #create default pattern for all operators (10 days long)
      timeStartList = []
      timeEndList = []
      for dayNumber in range(0,20):
        startTime = "00:00"
        endTime = "23:59"
        upDate = now.date()+datetime.timedelta(days=dayNumber)
        shiftStart = self.convertToSimulationTime(strptime("%s %s" % (upDate, startTime), '%Y-%m-%d %H:%M'))
        shiftEnd = self.convertToSimulationTime(strptime("%s %s" % (upDate, endTime), '%Y-%m-%d %H:%M'))
        timePair = self.correctTimePair(shiftStart, shiftEnd)
        shiftStart, shiftEnd = timePair
        timeStartList.append(shiftStart)
        timeEndList.append(shiftEnd)
      #for every operator (can be also machine) create an entry on the defaultShiftPattern 
      for node, node_data in nodes.iteritems():
        #if the node is an operator
        if node_data.get('_class', None) in ['Dream.MachineJobShop', 'Dream.MouldAssembly'] :
          for index, start in enumerate(timeStartList):
            end = timeEndList[index]
            if not start and not end:
              continue
            if not node in defaultShiftPattern:
              defaultShiftPattern[node] = [[start, end]]
            else:
              defaultShiftPattern[node].append([start, end])
      # ================================================================
      
      for node, node_data in nodes.items():
        if node_data.get('_class', None) in ['Dream.MachineJobShop', 'Dream.MouldAssembly']:
          modifiedDefaultDays = [] # the days of the defaultShiftPattern that have been modified according to the exceptionShiftPattern
          if node in exceptionShiftPattern:
            for index1, exception in enumerate(exceptionShiftPattern[node]):
              # XXX think of the case where the exception starts one day and finishes the next
              # calculate the time difference in hours from the end of the first day to the end of the exception
              # check if we are still in the first day
              if hoursToEndFirstDay.total_seconds()/3600 > exception[-1]:
                exceptionDay = 0
              # calculate the number of days till the end of the exception
              else:
                exceptionDay = math.floor((exception[-1] - hoursToEndFirstDay.total_seconds()/3600)/24) + 1
              for index2, default in enumerate(defaultShiftPattern[node]):
                # check if we still are in the first day
                if hoursToEndFirstDay.total_seconds()/3600 > default[-1]:
                  defaultDay = 0
                # calculate the number of days till the end of the default shift
                else:
                  defaultDay = math.floor((default[-1] - hoursToEndFirstDay.total_seconds()/3600)/24) + 1
                if exceptionDay == defaultDay:
                  # update the defaultShiftPattern of the node (operator or machine)
                  # if the exception day has not been modified then delete the previous entry and use the first exception that occurs
                  if not exceptionDay in modifiedDefaultDays:
                    defaultShiftPattern[node][index2] = exception
                  # otherwise append it at the end 
                  else:
                    defaultShiftPattern[node].append(exception)
                  modifiedDefaultDays.append(exceptionDay) # the day has been modified, add to the modified days
                  break
          # update the interruptions of the nodes that have a defaultShiftPattern
          if node in defaultShiftPattern:
            # sort the shift pattern of every node
            defaultShiftPattern[node].sort(key=itemgetter(0))
            # //////////////////////////////////////////////////
            # check if the offshift period is very short; if it is, then the pattern should be updated removing the very short off shift intervals
            tempPattern = copy.copy(defaultShiftPattern[node])
            mappedPattern = [tempPattern[0]]
            for index, shift in enumerate(tempPattern):
              if index<len(tempPattern) and index>0:
                # XXX I do not know if this difference is small enough, but the difference of one minute is that one (24:00h-23:59h)
                # if the off interval is very short then update the end of the last on-shift interval with the end of the current on-shift period
                if shift[0] - tempPattern[index-1][-1] <0.017:
                  mappedPattern[-1][-1] = shift[-1]
                # otherwise keep the considered interval as is
                else:
                  mappedPattern.append(shift)
            defaultShiftPattern[node] = mappedPattern
            # //////////////////////////////////////////////////
            # get the interruptions of the object
            interruptions = node_data.get("interruptions", {})
            if not interruptions:
              node_data["interruptions"] = {}
            node_data["interruptions"]["shift"] = {"shiftPattern": defaultShiftPattern.pop(node),
                                                   "endUnfinished": 0}
	
    return data
Example #55
 def __init__( self, master, **options ):
    theOptions = copy.copy( self.DEFAULT_OPTIONS )
    theOptions.update( options )
    
    Tix.Frame.__init__( self, master, **self._frameOptions() )
    self._buildGUI( )