def merge_datasets(dat1, dat2):
    '''Merge two datasets by concatenating their data and each axis.'''
    both = dat1._copy(axis=False)
    both.data = np_concatenate((dat1.data, dat2.data))
    both.axis['time'] = np_concatenate((dat1.time, dat2.time))
    both.axis['chan'] = np_concatenate((dat1.chan, dat2.chan))
    both.axis['freq'] = np_concatenate((dat1.freq, dat2.freq))
    return both
def numpy_full_list_cycle(array, desired_length):
    '''Return an array of the desired length by cycling over the input.'''
    length_diff = desired_length - array.shape[0]
    if length_diff > 0:
        if length_diff < array.shape[0]:
            return np_concatenate((array, array[:length_diff]))
        # tile whole copies of the array, then trim to the missing length
        if len(array.shape) > 1:
            shape = (ceil(length_diff / array.shape[0]), 1)
        else:
            shape = ceil(length_diff / array.shape[0])
        new_part = np_tile(array, shape)
        return np_concatenate((array, new_part[:length_diff]))
    return array[:desired_length]
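# A minimal usage sketch for numpy_full_list_cycle, assuming the module-level
# aliases it relies on (`from numpy import concatenate as np_concatenate,
# tile as np_tile` and `from math import ceil`) are in scope; the `_demo_*`
# name is hypothetical, not part of the original module.
def _demo_numpy_full_list_cycle():
    from numpy import array as np_array
    a = np_array([1, 2, 3])
    # shortfall smaller than the array: one partial copy is appended
    assert numpy_full_list_cycle(a, 5).tolist() == [1, 2, 3, 1, 2]
    # larger shortfall: whole copies are tiled, then trimmed
    assert numpy_full_list_cycle(a, 7).tolist() == [1, 2, 3, 1, 2, 3, 1]
    # desired length shorter than the array: simply truncated
    assert numpy_full_list_cycle(a, 2).tolist() == [1, 2]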
def rle_encode(self, img: np_ndarray):
    '''
    img: numpy array, 1 - mask, 0 - background
    Returns run length as a formatted string
    '''
    pixels = img.flatten()
    # pad with zeros so runs touching the borders are detected
    pixels = np_concatenate([[0], pixels, [0]])
    runs = np_where(pixels[1:] != pixels[:-1])[0] + 1
    runs[1::2] -= runs[::2]  # convert (start, end) pairs to (start, length)
    return ' '.join(str(x) for x in runs)
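# A minimal sketch of rle_encode on a tiny mask; since `self` is unused, the
# function is called with None for the instance. Assumes the module-level
# numpy aliases (np_concatenate, np_where) are in scope; `_demo_*` is a
# hypothetical name.
def _demo_rle_encode():
    from numpy import array as np_array
    mask = np_array([[0, 1, 1],
                     [0, 1, 0]])
    # flattened mask is [0, 1, 1, 0, 1, 0]: a run of 2 starting at (1-based)
    # pixel 2 and a run of 1 starting at pixel 5
    assert rle_encode(None, mask) == '2 2 5 1'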
def numpy_full_list(array, desired_length):
    '''Return an array of the desired length by repeating the last item.'''
    if not isinstance(array, ndarray):
        array = np_array(array)
    length_diff = desired_length - array.shape[0]
    if length_diff > 0:
        new_part = np_repeat(array[np_newaxis, -1], length_diff, axis=0)
        return np_concatenate((array, new_part))[:desired_length]
    return array[:desired_length]
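# A minimal sketch of numpy_full_list; a plain list is accepted and converted
# internally. Assumes the module-level aliases (ndarray, np_array, np_repeat,
# np_newaxis, np_concatenate) are in scope; `_demo_*` is a hypothetical name.
def _demo_numpy_full_list():
    # the last item is repeated to pad up to the desired length
    assert numpy_full_list([1, 2, 3], 5).tolist() == [1, 2, 3, 3, 3]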
def numpy_match_long_repeat(list_of_arrays):
    '''Match the lengths of numpy arrays by repeating each array's last item.'''
    out = []
    maxl = 0
    for array in list_of_arrays:
        maxl = max(maxl, array.shape[0])
    for array in list_of_arrays:
        length_diff = maxl - array.shape[0]
        if length_diff > 0:
            new_part = np_repeat(array[np_newaxis, -1], length_diff, axis=0)
            array = np_concatenate((array, new_part))
        out.append(array)
    return out
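# A minimal sketch of numpy_match_long_repeat: the shorter array is padded by
# repeating its last item. Assumes the module-level numpy aliases are in
# scope; `_demo_*` is a hypothetical name.
def _demo_numpy_match_long_repeat():
    from numpy import array as np_array
    a, b = np_array([1, 2, 3]), np_array([10, 20])
    out = numpy_match_long_repeat([a, b])
    assert out[0].tolist() == [1, 2, 3]
    assert out[1].tolist() == [10, 20, 20]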
def numpy_match_long_cycle(list_of_arrays):
    '''Match the lengths of numpy arrays by cycling over the shorter ones.'''
    out = []
    maxl = 0
    for array in list_of_arrays:
        maxl = max(maxl, array.shape[0])
    for array in list_of_arrays:
        length_diff = maxl - array.shape[0]
        if length_diff > 0:
            if length_diff < array.shape[0]:
                array = np_concatenate((array, array[:length_diff]))
            else:
                # tile whole copies of the array, then trim to the missing length
                if len(array.shape) > 1:
                    shape = (ceil(length_diff / array.shape[0]), 1)
                else:
                    shape = ceil(length_diff / array.shape[0])
                new_part = np_tile(array, shape)
                array = np_concatenate((array, new_part[:length_diff]))
        out.append(array)
    return out
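# A minimal sketch of numpy_match_long_cycle: the shorter array is padded by
# cycling over its own values. Assumes the module-level numpy aliases are in
# scope; `_demo_*` is a hypothetical name.
def _demo_numpy_match_long_cycle():
    from numpy import array as np_array
    a, b = np_array([1, 2, 3, 4, 5]), np_array([10, 20])
    out = numpy_match_long_cycle([a, b])
    assert out[1].tolist() == [10, 20, 10, 20, 10]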
def append(data, state):
    '''Concatenate, per key in DATA_KEYS, the records whose state matches.'''
    outd = {}
    for dkey in DATA_KEYS:
        outd[dkey] = []
    for key in data.keys():
        if data[key].state == state:
            for dkey in DATA_KEYS:
                outd[dkey].append(data[key].data[dkey])
    for dkey in DATA_KEYS:
        outd[dkey] = np_concatenate(outd[dkey], 0)
    return outd
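# A minimal sketch of append, assuming records expose `.state` and `.data`
# and that DATA_KEYS is a module-level tuple of field names; here it is set
# to a pretend value for the demo only. `_demo_*` is a hypothetical name.
def _demo_append():
    from types import SimpleNamespace
    from numpy import array as np_array
    global DATA_KEYS
    DATA_KEYS = ('x',)  # pretend value, for the sketch only
    recs = {
        'a': SimpleNamespace(state='train', data={'x': np_array([[1], [2]])}),
        'b': SimpleNamespace(state='train', data={'x': np_array([[3]])}),
        'c': SimpleNamespace(state='test', data={'x': np_array([[9]])}),
    }
    out = append(recs, 'train')
    # only 'train' records are concatenated, in insertion order
    assert out['x'].tolist() == [[1], [2], [3]]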
def extend(self, note_sequence: 'NoteSequence') -> 'NoteSequence':
    validate_type('note_sequence', note_sequence, NoteSequence)
    if len(self.note_attr_vals) and \
            self.note_attr_vals[0].shape != note_sequence.note_attr_vals[0].shape:
        raise NoteSequenceInvalidAppendException(
            'NoteSequence extended to a NoteSequence must have the same number of attributes')
    # Either this sequence already has notes or it is empty. If it has notes, append the
    # incoming notes -- the shape check above has already confirmed they conform. If it is
    # empty, copy the incoming notes' attribute values into this sequence.
    if len(self.note_attr_vals):
        self.note_attr_vals = np_concatenate((self.note_attr_vals, note_sequence.note_attr_vals))
    else:
        self.note_attr_vals = np_copy(note_sequence.note_attr_vals)
    self.update_range_map()
    return self
def L2_metric(q0, q1, I0, I1):
    if array_equal(I0, I1):
        return sqrt(sum(
            (I0[k+1] - I0[k]) * (norm(q0[k] - q1[k])**2)
            for k in range(I0.shape[0] - 1)
        ))
    # create array of shared interpolation points
    I = unique(np_concatenate((I0, I1)))
    i, j = 0, 0
    l2_sum = 0.0
    # interpolate to previous when creating diff
    for k in range(I.shape[0] - 1):
        l2_sum += (I[k+1] - I[k]) * (norm(q0[i] - q1[j])**2)
        if I0[i+1] <= I[k+1]:
            i += 1
        if I1[j+1] <= I[k+1]:
            j += 1
    return sqrt(l2_sum)
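# A worked sketch of L2_metric on the fast path (identical parameter grids):
# each segment contributes (I[k+1]-I[k]) * ||q0[k]-q1[k]||^2. Assumes the
# module-level imports (array_equal, norm, sqrt, unique, np_concatenate) are
# in scope; `_demo_*` is a hypothetical name.
def _demo_L2_metric():
    from numpy import array as np_array
    I = np_array([0.0, 0.5, 1.0])
    q0 = np_array([[0.0], [0.0], [0.0]])
    q1 = np_array([[1.0], [1.0], [1.0]])
    # two segments, each contributing 0.5 * 1.0 -> sqrt(1.0) == 1.0
    assert abs(L2_metric(q0, q1, I, I) - 1.0) < 1e-12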
def concatenate(x, y):
    # append y reversed (dropping its last sample) and shifted so that
    # y[-1] coincides with x[-1], producing a mirrored continuation of x
    return np_concatenate((x, (y[-2::-1] + x[-1] - y[-1])), axis=0)
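# A minimal sketch of concatenate: x is continued by the mirrored tail of y,
# shifted so y's last sample lands on x's last sample. Assumes the
# module-level np_concatenate alias is in scope; `_demo_*` is a hypothetical
# name.
def _demo_concatenate():
    from numpy import array as np_array
    x = np_array([0.0, 1.0, 2.0])
    y = np_array([2.0, 3.0, 4.0])
    assert concatenate(x, y).tolist() == [0.0, 1.0, 2.0, 1.0, 0.0]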
def inset_regular_pols(np_verts, np_pols,
                       np_distances, np_inset_rate, np_make_inners,
                       np_faces_id, custom_normals, matrices,
                       offset_mode='CENTER', proportional=False,
                       concave_support=True, index_offset=0,
                       use_custom_normals=False,
                       output_old_face_id=True,
                       output_old_v_id=True,
                       output_pols_groups=True):

    pols_number = np_pols.shape[0]
    pol_sides = np_pols.shape[1]
    v_pols = np_verts[np_pols]  # shape [num_pols, num_corners, 3]

    if offset_mode == 'SIDES':
        inner_points = sides_mode_inset(v_pols, np_inset_rate, np_distances,
                                        concave_support, proportional,
                                        use_custom_normals, custom_normals)
    elif offset_mode == 'MATRIX':
        inner_points = matrix_mode_inset(v_pols, matrices,
                                         use_custom_normals, custom_normals)
    else:
        if any(np_distances != 0):
            if use_custom_normals:
                normals = custom_normals
            else:
                normals = np_faces_normals(v_pols)
            average = np.sum(v_pols, axis=1) / pol_sides  # shape [num_pols, 3]
            inner_points = (average[:, np_newaxis, :]
                            + (v_pols - average[:, np_newaxis, :]) * np_inset_rate[:, np_newaxis, np_newaxis]
                            + normals[:, np_newaxis, :] * np_distances[:, np_newaxis, np_newaxis])
        else:
            average = np.sum(v_pols, axis=1) / pol_sides  # shape [num_pols, 3]
            inner_points = (average[:, np_newaxis, :]
                            + (v_pols - average[:, np_newaxis, :]) * np_inset_rate[:, np_newaxis, np_newaxis])

    idx_offset = len(np_verts) + index_offset
    new_v_idx = np_arange(idx_offset, pols_number * pol_sides + idx_offset).reshape(pols_number, pol_sides)

    side_pols = np.zeros([pols_number, pol_sides, 4], dtype=int)
    side_pols[:, :, 0] = np_pols
    side_pols[:, :, 1] = np_roll(np_pols, -1, axis=1)
    side_pols[:, :, 2] = np_roll(new_v_idx, -1, axis=1)
    side_pols[:, :, 3] = new_v_idx
    side_faces = side_pols.reshape(-1, 4)

    new_insets = new_v_idx[np_make_inners]
    if pol_sides == 4:
        new_faces = np_concatenate([side_faces, new_insets]).tolist()
    else:
        new_faces = side_faces.tolist() + new_insets.tolist()

    old_v_id = np_pols.flatten().tolist() if output_old_v_id else []
    if output_old_face_id:
        side_ids = np.repeat(np_faces_id[:, np_newaxis], pol_sides, axis=1)
        inset_ids = np_faces_id[np_make_inners]
        old_face_id = np.concatenate((side_ids.flatten(), inset_ids)).tolist()
    else:
        old_face_id = []

    if output_pols_groups:
        pols_groups = np_repeat([1, 2], [len(side_faces), len(new_insets)]).tolist()
    else:
        pols_groups = []

    return (inner_points.reshape(-1, 3).tolist(),
            new_faces,
            new_insets.tolist(),
            old_v_id,
            old_face_id,
            pols_groups)
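# A minimal sketch of inset_regular_pols in the 'CENTER' branch with zero
# distances: one unit quad inset by a factor of 0.5. Assumes the module-level
# numpy aliases used above (np, np_arange, np_roll, np_repeat, np_newaxis,
# np_concatenate) are in scope; `_demo_*` is a hypothetical name.
def _demo_inset_regular_pols():
    import numpy as np
    verts = np.array([[0., 0., 0.], [1., 0., 0.], [1., 1., 0.], [0., 1., 0.]])
    pols = np.array([[0, 1, 2, 3]])
    res = inset_regular_pols(verts, pols,
                             np.zeros(1),          # np_distances
                             np.array([0.5]),      # np_inset_rate
                             np.array([True]),     # np_make_inners
                             None, None, None,     # faces_id, normals, matrices
                             offset_mode='CENTER', output_old_face_id=False)
    inner_points, new_faces = res[0], res[1]
    assert inner_points[0] == [0.25, 0.25, 0.0]
    assert len(new_faces) == 5  # four side quads plus one inner quad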
def extrude_edges(vertices, edges, faces, edge_mask, face_data, matrices):
    if not matrices:
        matrices = [Matrix()]
    if face_data:
        face_data_matched = repeat_last_for_length(face_data, len(faces))
    if edge_mask:
        edge_mask_matched = repeat_last_for_length(edge_mask, len(edges))

    if isinstance(edges, np_ndarray):
        if edge_mask:
            np_edges = edges[edge_mask_matched]
        else:
            np_edges = edges
    else:
        if edge_mask:
            np_edges = np_array(edges)[edge_mask_matched]
        else:
            np_edges = np_array(edges)

    if isinstance(vertices, np_ndarray):
        np_verts = vertices
    else:
        np_verts = np_array(vertices)

    affected_verts_idx = np_unique(np_edges)

    if len(matrices) == 1:
        extruded_verts = matrix_apply_np(np_verts[affected_verts_idx], matrices[0])
        new_vertices = np_concatenate([np_verts, extruded_verts]).tolist()
    else:
        extruded_verts = [m @ Vector(v)
                          for v, m in zip(np_verts[affected_verts_idx].tolist(), cycle(matrices))]
        new_vertices = vertices + extruded_verts

    top_edges = np_edges + len(vertices)
    mid_edges = np_zeros((len(affected_verts_idx), 2), dtype=int)
    mid_edges[:, 0] = affected_verts_idx
    mid_edges[:, 1] = affected_verts_idx + len(vertices)
    extruded_edges_py = (np_concatenate([top_edges, mid_edges])).tolist()

    extruded_faces = np_zeros((len(np_edges), 4), dtype=int)
    extruded_faces[:, :2] = np_edges
    extruded_faces[:, 2] = top_edges[:, 1]
    extruded_faces[:, 3] = top_edges[:, 0]
    extruded_faces_py = extruded_faces.tolist()

    if isinstance(edges, np_ndarray):
        new_edges = np_concatenate([edges, top_edges, mid_edges]).tolist()
    else:
        new_edges = edges + extruded_edges_py

    if faces and faces[0]:
        if isinstance(faces, np_ndarray):
            new_faces = np_concatenate([faces, extruded_faces]).tolist()
        else:
            new_faces = faces + extruded_faces_py
    else:
        new_faces = extruded_faces_py

    if face_data:
        bvh = bvh_tree_from_polygons(vertices, faces, all_triangles=False,
                                     epsilon=0.0, safe_check=True)
        mid_points = (np_verts[np_edges[:, 1]] + np_verts[np_edges[:, 0]]) / 2
        face_idx = [bvh.find_nearest(P)[2] for P in mid_points.tolist()]
        new_face_data = face_data_matched + [face_data_matched[p] for p in face_idx]
    else:
        new_face_data = []

    return (new_vertices, new_edges, new_faces,
            extruded_verts, extruded_edges_py, extruded_faces_py,
            new_face_data)
def r3_dnn_apply_keras(target_dirname, old_stft_obj=None, cuda=False, saving_to_disk=True):
    '''
    r3_dnn_apply takes an old_stft object (or loads it from disk as a side
    effect) and saves a new_stft object.
    '''
    LOGGER.info('{}: r3: Denoising original stft with neural network model...'.format(target_dirname))
    scan_battery_dirname = os_path_dirname(target_dirname)
    model_dirname = os_path_dirname(os_path_dirname(scan_battery_dirname))

    # load stft data, combining stft_real and stft_imag along axis 1
    if old_stft_obj is None:
        old_stft_fpath = os_path_join(target_dirname, 'old_stft.mat')
        with h5py_File(old_stft_fpath, 'r') as f:
            stft = np_concatenate([f['old_stft_real'][:], f['old_stft_imag'][:]], axis=1)
    else:
        stft = np_concatenate([old_stft_obj['old_stft_real'], old_stft_obj['old_stft_imag']], axis=1)

    N_beams, N_elements_2, N_segments, N_fft = stft.shape
    N_elements = N_elements_2 // 2

    # move element position axis  # TODO: Duplicate?
    stft = np_moveaxis(stft, 1, 2)
    # flatten the first two axes  # TODO: Duplicate?
    stft = np_reshape(stft, [N_beams * N_segments, N_elements_2, N_fft])

    # process stft with networks
    k_mask = list(range(3, 6))
    for frequency in k_mask:
        process_each_frequency_keras(model_dirname, stft, frequency)

    # reshape the stft data  # TODO: Duplicate?
    stft = np_reshape(stft, [N_beams, N_segments, N_elements_2, N_fft])

    # set zero outside analysis frequency range
    discard_mask = np_ones_like(stft, dtype=bool)
    discard_mask[:, :, :, k_mask] = False  # pylint: disable=E1137
    stft[discard_mask] = 0
    del discard_mask

    # mirror data to negative frequencies using conjugate symmetry
    end_index = N_fft // 2
    stft[:, :, :, end_index + 1:] = np_flip(stft[:, :, :, 1:end_index], axis=3)
    stft[:, :, N_elements:2 * N_elements, end_index + 1:] *= -1

    # move element position axis back  # TODO: Duplicate?
    stft = np_moveaxis(stft, 1, 2)

    # split into real and imaginary parts and change dimensions
    new_stft_real = stft[:, :N_elements, :, :].transpose()
    new_stft_imag = stft[:, N_elements:, :, :].transpose()
    del stft

    # save new stft data
    new_stft_obj = {
        'new_stft_real': new_stft_real,
        'new_stft_imag': new_stft_imag
    }
    if saving_to_disk:
        new_stft_fname = os_path_join(target_dirname, 'new_stft.mat')
        savemat(new_stft_fname, new_stft_obj)
    LOGGER.info('{}: r3 Done.'.format(target_dirname))
    return new_stft_obj
def __init_alliances(self):
    '''Build the alliances table: alliance numbers 1-8 prepended to the team
    numbers picked by each alliance ('frcXXXX' keys stripped to XXXX).'''
    alliances = [[team[3:] for team in alliance['picks']]
                 for alliance in self.raw_event['alliances']]
    alliances = np_array(alliances, np_int)
    numbers = np_vstack(np_arange(1, 9, 1))
    self.alliances = np_concatenate((numbers, alliances), 1)
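# A minimal sketch of the table construction inside __init_alliances,
# re-created on dummy data (the method itself is private and reads
# self.raw_event). Team keys like 'frc254' are stripped of the 'frc' prefix;
# `_demo_*` is a hypothetical name.
def _demo_alliance_table():
    from numpy import (array as np_array, arange as np_arange,
                       vstack as np_vstack, concatenate as np_concatenate)
    picks = [['frc254', 'frc1114', 'frc2056']] * 8  # 8 alliances, 3 picks each
    alliances = np_array([[team[3:] for team in p] for p in picks], int)
    numbers = np_vstack(np_arange(1, 9, 1))  # column of alliance numbers 1..8
    table = np_concatenate((numbers, alliances), 1)
    assert table.shape == (8, 4)
    assert table[0].tolist() == [1, 254, 1114, 2056]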