def get_t_and_mu(T, D):
    """Split the columns of T into per-point vectors with NaN entries removed.

    Args:
        T: (D, N) data matrix; each column is one data point, possibly
            containing NaNs for missing entries.
        D: dimensionality (number of rows of T).

    Returns:
        t_list: list of per-column data vectors; NaN entries removed when any
            data is missing, otherwise full (D, 1) column vectors.
        mu_list: list of mean vectors with the same entries removed.
        nan_indices_list: list (length N) of lists of row indices that were
            NaN in each column (empty lists when nothing was missing).
    """
    T_boole = np.isnan(T)
    N = T.shape[1]
    mu = calc_mean_T(T)

    t_list = []
    mu_list = []
    # Independent inner lists: the original `N * [[]]` aliased ONE shared
    # list N times, so a caller mutating one entry would mutate all of them.
    nan_indices_list = [[] for _ in range(N)]

    if np.any(T_boole):
        for i in range(N):
            # Row indices where column i is missing (replaces the per-row
            # copy/append loop of the original).
            nan_indices = [int(j) for j in np.where(T_boole[:, i])[0]]
            nan_indices_list[i] = nan_indices
            t_list.append(np.delete(T[:, i], nan_indices))
            mu_list.append(np.delete(mu, nan_indices))
    else:
        for i in range(N):
            # No missing data: keep the full column as a (D, 1) column vector.
            t_list.append(T[:, i].reshape(D, 1))
            mu_list.append(mu)
    return t_list, mu_list, nan_indices_list
def generate(data):
    """Randomize a question about maximum speedup for a partly-serial program.

    Stores the chosen serial percentage, the correct answer (100 / percentage),
    and the five incorrect answer choices in data['params'].
    """
    # Candidate percentages of the program that is serial.
    percentages = [5, 10, 20, 25, 50, 100]
    idx = random.choice(range(6))
    data['params']['percentage'] = percentages[idx]

    # Corresponding solutions for every candidate; the one at idx is correct.
    solutions = [100 / value for value in percentages]
    correct = solutions[idx]
    data['params']['solution'] = correct

    # The remaining five solutions become the incorrect choices i0..i4.
    distractors = list(solutions)
    distractors.remove(correct)
    for k, wrong in enumerate(distractors):
        data['params'][f'i{k}'] = wrong
def astar(init, goal, network):
    path = []
    open = []
    closed = []
    ### YOUR CODE GOES BELOW HERE ###
    # Frontier entries are (estimated-cost, path-so-far, node) triples;
    # min() picks the lexicographically smallest, i.e. cheapest estimate.
    open.append((0, list(), init))
    while open:
        cost, route, node = min(open)
        open.remove((cost, route, node))
        if node in closed:
            continue
        if goal == node:
            return route, closed
        closed.append(node)
        # Expand along every edge touching `node` (edges are node pairs).
        for edge in network:
            if node == edge[0]:
                neighbour = edge[1]
            elif node == edge[1]:
                neighbour = edge[0]
            else:
                continue
            extended = route[:]
            extended.append(neighbour)
            open.append((cost + euDis(neighbour, goal), extended, neighbour))
    ### YOUR CODE GOES ABOVE HERE ###
    return path, closed
def __add__(self, value):
    """Return a new LinkedList: this list's values followed by `value`."""
    # Seed the new list with the first stored value, then copy the rest.
    result = LinkedList(self.__values[0])
    for item in self.__values[1:]:
        result.append(item)
    result.append(value)
    return result
def duplicatePuzzle(self):
    """Return an element-level copy of self.puzzle.

    The original reused the outer length for the inner loop (a square-grid
    assumption); copying each row by its own length is identical for square
    grids and also correct for rectangular ones.
    """
    return [list(row) for row in self.puzzle]
def inventory_copy(self):
    # Snapshot the current inventory as fresh Inventory objects so the
    # caller can mutate the copy without touching the live state.
    return [Inventory(item.name, item.quantity) for item in self.inventory]
def duplicate(self):
    """Return a new LinkedList holding this list's values in order."""
    clone = LinkedList()
    current = self.head
    while current:
        clone.append(current.value)
        current = current.next
    return clone
def copy(self, puzzle):
    """Return an element-level copy of the 2-D `puzzle` grid.

    The original reused the outer length for the inner loop (square-grid
    assumption); copying each row by its own length is identical for square
    grids and also correct for rectangular ones.
    """
    return [list(row) for row in puzzle]
def getCopy(self):
    """Return an element-level copy of self.puzzle.

    The original reused the outer length for the inner loop (square-grid
    assumption); per-row copying is identical for square grids and also
    correct for rectangular ones.
    """
    return [list(row) for row in self.puzzle]
def copy(self):
    """Return a new TilePuzzle whose board is a row-by-row copy of this one."""
    board = [row[:] for row in self.board]
    return TilePuzzle(board)
def load_file(input_file: str) -> "list[list[str]]":
    """Load a text file as a list of rows, each row a list of characters.

    Args:
        input_file: path of the file to read.

    Returns:
        One inner list per line, holding that line's characters with the
        trailing newline stripped.  (The original annotation `[str]` was a
        literal list expression and did not describe the actual return.)
    """
    with open(input_file) as data_file:
        return [list(line.rstrip("\n")) for line in data_file]
def getContents(self):
    """Returns: a new list containing copies of the points in this cluster.

    The result is a list of list of numbers.  It has to be computed from
    the indices.
    """
    # Hoisted: the original called self.getIndices() twice per iteration.
    indices = self.getIndices()
    return [self._dataset.getPoint(i) for i in indices]
def deep_copy(obj):
    """Return a deep (i.e., non-aliased at all) copy of obj, a nested list
    of integers."""
    # Base case: isinstance is the idiomatic check (the original's
    # `type(obj) != list` also rejected list subclasses).
    if not isinstance(obj, list):
        return obj
    # The leap of faith: assume that deep_copy works on each element!
    return [deep_copy(elem) for elem in obj]
def convertInf(graph, inf_val):
    """Return a copy of the adjacency matrix with `inf_val` entries zeroed.

    Args:
        graph: 2-D matrix (list of rows of numbers).
        inf_val: the value standing in for "no edge"/infinity.

    Returns:
        A new matrix where entries equal to float(inf_val) become 0 and all
        other entries are kept.
    """
    # Hoisted: the original converted inf_val to float on every comparison.
    sentinel = float(inf_val)
    # Per-row iteration also lifts the original's square-grid assumption
    # (inner loop used len(graph)); identical for square inputs.
    return [[0 if value == sentinel else value for value in row]
            for row in graph]
def resample(self):
    """Resample the particle set, with replacement, in proportion to weight."""
    self.normalize()
    weights = [p.weight for p in self._particles]
    total_weight = sum(weights)
    # Bug fix: `weights / total_weight` divided a plain Python list by a
    # scalar, which raises TypeError.  Normalize element-wise instead so
    # np.random.choice receives a valid probability vector.
    probabilities = [w / total_weight for w in weights]
    choice = np.random.choice(self._particles, size=self.num_particles,
                              replace=True, p=probabilities)
    # NOTE(review): passes p.weight as ln_p — looks like a weight vs
    # log-weight mismatch; confirm Particle's constructor semantics.
    resampled = [Particle(x=p.x, y=p.y, theta=p.theta, ln_p=p.weight)
                 for p in choice]
    # NOTE(review): reads self._particles but assigns self.particles —
    # confirm `particles` is a property backed by `_particles`.
    self.particles = resampled
def make_copy(self):
    """
    Returns copy of Field.field from self

    Return:
        [[Cell]]
    """
    # Pre-allocate self.size empty rows, then fill them with fresh Cell
    # objects mirroring the current field.
    grid = [[] for _ in range(self.size)]
    for row_index, row in enumerate(self.field):
        for cell in row:
            grid[row_index].append(Cell(cell.id, cell.direction, cell.vehicle_size))
    return grid
def build_residual_graph(graph, flow):
    """Build the residual graph of `graph` under the given `flow`.

    Forward edges carry capacity - flow; backward edges carry -flow (omitted
    when no flow is pushed); edges with zero residual capacity are dropped.

    Returns:
        Adjacency list indexed 0..len(graph)-1, each entry a list of
        [neighbor, residual_weight] pairs.
    """
    residual = change_to_dict_of_dict(graph)
    length = len(graph)

    # Forward edges: residual capacity = capacity - flow.
    forward_edges = []
    for u in residual:
        for v in residual[u]:
            residual[u][v] = residual[u][v] - flow[u][v]
            forward_edges.append([u, v])

    # Backward edges: -flow (skipped when the edge carries no flow).
    for u, v in forward_edges:
        pushed = flow[u][v]
        if pushed != 0:
            residual[v][u] = -pushed

    # Drop edges whose residual capacity is zero.
    # (The original also did `my_dict[num] = None` here with a stale loop
    # variable; the entry was always overwritten later, so that dead write
    # is removed.)
    pruned = {}
    for u in residual:
        pruned[u] = {v: w for v, w in residual[u].items() if w != 0}

    # Emit nodes in ascending order as an adjacency list.
    adjacency = []
    for u in range(length):
        adjacency.append([[v, w] for v, w in pruned[u].items()])
    return adjacency
def removeFullRows(app):
    """Drop filled rows from the board, pad with empty rows on top, and
    credit the score with the number of rows cleared."""
    # checkRow is a helper function that tells us if the row is filled
    kept = [row for row in app.board if checkRow(app, row) == False]
    cleared = len(app.board) - len(kept)
    # Re-insert one empty row at the top for every row that was removed.
    for _ in range(cleared):
        kept.insert(0, [app.emptyColor] * app.cols)
    app.board = kept
    app.score += cleared
def _create_clone(self, element):
    """Recursively clone a BeautifulSoup node.

    Returns a new, detached node equivalent to `element`: strings are
    re-wrapped in their own NavigableString subclass, tags are rebuilt
    with the same name/namespace/attrs and recursively cloned children.
    """
    if isinstance(element, NavigableString):
        # Re-instantiate with the same concrete subclass (e.g. Comment).
        return type(element)(element)
    copy = Tag(None, element.builder, element.name, element.namespace, element.nsprefix)
    # work around bug where there is no builder set
    # https://bugs.launchpad.net/beautifulsoup/+bug/1307471
    copy.attrs = dict(element.attrs)
    # Preserve parser-populated flags that the Tag constructor does not take.
    for attr in ('can_be_empty_element', 'hidden'):
        setattr(copy, attr, getattr(element, attr))
    # Recursively clone and attach every child node.
    for child in element.contents:
        copy.append(self._create_clone(child))
    return copy
def preprocess(self, data):
    """Apply the logarithm + moveable_lines processing chain to `data`.

    Handles either a list of flat lists (processed row by row) or a single
    flat structure, decided by comparing the two leading dimensions.
    """
    shape = np.array(data).shape
    methods = [Process.logarithm, Process.moveable_lines]
    # If list of flat lists
    if shape[0] > shape[1]:
        return [Process.multiple_methods(argument=row, methods=methods)
                for row in data]
    # If one single
    return Process.multiple_methods(argument=data, methods=methods)
def all_options(cells_left):
    """Return every 0/1 configuration of length `cells_left`.

    Order matches the recursive construction: each shorter configuration is
    extended in place with 0, and copies extended with 1 are appended after
    all of the 0-extensions.
    """
    if cells_left == 1:
        return [[0], [1]]
    arrays = all_options(cells_left - 1)
    with_one = []
    for array in arrays:
        # Record a 1-extended copy before mutating the original with a 0.
        with_one.append(array + [1])
        array.append(0)
    arrays.extend(with_one)
    return arrays
def updateDicts(nodesByClusterID, maxID):
    """Merge cluster `maxID`: delete the per-node clusters of its members
    and remove those members from every remaining cluster's node list.

    Returns the mutated nodesByClusterID dict.
    """
    merged_ids = []
    for node in nodesByClusterID[maxID]:
        merged_ids.append(node.getID())
        del nodesByClusterID[node.getID()]
    removable = set(merged_ids)
    for clusterID in nodesByClusterID.keys():
        # Bug fix: iterate a snapshot — the original removed elements from
        # the very list it was iterating, which silently skips the element
        # following each removal.
        for n in list(nodesByClusterID[clusterID]):
            if n.getID() in removable:
                nodesByClusterID[clusterID].remove(n)
    return nodesByClusterID
def updateDicts(nodesByClusterID, maxID):
    """Merge cluster `maxID`: delete the per-node clusters of its members
    and remove those members from every remaining cluster's node list.

    Returns the mutated nodesByClusterID dict.
    """
    merged_ids = []
    for node in nodesByClusterID[maxID]:
        merged_ids.append(node.getID())
        del nodesByClusterID[node.getID()]
    removable = set(merged_ids)
    for clusterID in nodesByClusterID.keys():
        # Bug fix: iterate a snapshot — the original removed elements from
        # the very list it was iterating, which silently skips the element
        # following each removal.
        for n in list(nodesByClusterID[clusterID]):
            if n.getID() in removable:
                nodesByClusterID[clusterID].remove(n)
    return nodesByClusterID
def remove_item_nested_list(nested_gff, remove_string):
    """Filter a nested GFF list, dropping hits that contain `remove_string`.

    Args:
        nested_gff: list of genes, each gene a list of hit strings.
        remove_string: substring marking hits to discard.

    Returns:
        A new nested list with matching hits removed; the input is not
        mutated and empty genes are preserved as empty lists.
    """
    return [[hit for hit in gene if remove_string not in hit]
            for gene in nested_gff]
def updateDicts(nodesByClusterID, maxID, clusters, nodes):
    """Fold cluster `maxID`'s members into clusters[maxID], delete their
    per-node cluster entries, and purge their IDs from every remaining
    cluster's node-ID list.  Returns (nodesByClusterID, clusters)."""
    merged_ids = []
    # TODO: double-check this section (translated from the original note).
    for memberID in nodesByClusterID[maxID]:
        merged_ids.append(memberID)
        clusters[maxID].append(nodes[memberID])
        del nodesByClusterID[memberID]
    for clusterID in nodesByClusterID.keys():
        # Snapshot the member list so removal does not disturb iteration.
        snapshot = list(nodesByClusterID[clusterID])
        for member in snapshot:
            for removed_id in merged_ids:
                if member == removed_id:
                    nodesByClusterID[clusterID].remove(member)
    return nodesByClusterID, clusters
def updateDicts(nodesByClusterID, maxID, clusters, nodes):
    """Fold cluster `maxID`'s members into clusters[maxID], delete their
    per-node cluster entries, and purge their IDs from every remaining
    cluster's node-ID list.  Returns (nodesByClusterID, clusters)."""
    merged_ids = []
    # TODO: double-check this section (translated from the original note).
    for memberID in nodesByClusterID[maxID]:
        merged_ids.append(memberID)
        clusters[maxID].append(nodes[memberID])
        del nodesByClusterID[memberID]
    for clusterID in nodesByClusterID.keys():
        # Snapshot the member list so removal does not disturb iteration.
        snapshot = list(nodesByClusterID[clusterID])
        for member in snapshot:
            for removed_id in merged_ids:
                if member == removed_id:
                    nodesByClusterID[clusterID].remove(member)
    return nodesByClusterID, clusters
def __init__(self, params, dbType, dataTypeConverter, outparams=None):
    """Resolve a mixed parameter list, substituting OUT-parameter values.

    params: sequence mixing plain values and OutParam placeholders.
    dbType: database type identifier handed to the converter.
    dataTypeConverter: converts declared data types / values for dbType.
    outparams: optional list of raw output values, consumed front-to-front
        (pop(0)) once per OutParam encountered, in order.
    """
    names = {}
    copy = []
    for p in params:
        if isinstance(p, OutParam):
            # Prefer the driver-supplied output value; fall back to the
            # placeholder's own value.
            if outparams:
                value = outparams.pop(0)
            else:
                value = p.value()
            if p.dataType is not None:
                # Convert according to the declared data type.
                typeCode = dataTypeConverter.convertType(
                    dbType, p.dataType)
                value = dataTypeConverter.convertValue(
                    dbType, p.dataType, typeCode, value)
            copy.append(value)
            if p.name is not None:
                names[p.name] = value
        else:
            # Plain values pass through untouched.
            copy.append(p)
    # Store via object.__setattr__ to bypass this class's own __setattr__
    # (presumably overridden to make instances read-only — confirm).
    super(OutParams, self).__setattr__("config", copy)
    super(OutParams, self).__setattr__("names", names)
def copy_nodes(self, state):
    """
    Returns a 'deep' copy of list of nodes.

    Parameters
    ----------
    state: a list
        A list containing Node objects representing a wire path.

    Returns
    -------
    list
        A list containing 'deep' copy of the nodes in the original list.
    """
    return [node.get_copy() for node in state]
def check():
    """Find and print the longest word(s) buildable from the generated letters."""
    p = preprocessing()
    letters = generateLetters()
    result = {}
    # NOTE(review): this try/except looks like leftover debugging —
    # sys.getsizeof on a dict does not raise AttributeError here.
    try:
        print(sys.getsizeof(result))
    except AttributeError:
        print("sys.getsizeof exists in Python ")
    # Group every buildable word by its length.
    for word in p:
        if is_word_possible(word, letters):## returns True if any letter is in word
            length = len(word)
            try:
                copy = result[length]## keeps track of everything in result at that word size
            except KeyError:
                copy = []
            copy.append(word)
            result[length] = copy##re appends result with the new word for that word size
    ##le = max(len(x) for x in result)
    #print(result)
    print(result[sorted(result.keys())[-1]])##prints the longest word/words found if same length
def analyse(self):  # Analyse whether the remaining 3n tiles satisfy the winning-hand condition (translated from Chinese).
    # Base case: every hand tile consumed — record the current decomposition
    # (accumulated result melds plus declared melds) and backtrack.
    if len(self.handtiles) == 0:
        copy = []
        for meld in self.result:
            copy.append(meld)
        for meld in self.melds:
            copy.append(meld)
        self.allResult.append(copy)
        self.back()
        return
    color = self.handtiles[0].color
    number = self.handtiles[0].number
    if (self.total[color][number] in [1, 2]):
        # 1 or 2 copies left: the tile can only start a run (shun).
        # Presumably colors 4/5 are honor suits that cannot form runs, and a
        # run cannot extend past number 9 — TODO confirm suit encoding.
        if ((color in [4, 5]) or number + 2 > 9 or self.total[color][number + 1] == 0 or self.total[color][number + 2] == 0):
            self.back()
            return
        else:
            self.totalToResult_shun(color, number)
            self.analyse()
    elif (self.total[color][number] == 3):
        # Exactly three copies: consume them as a triplet (ke).
        self.totalToResult_ke(color, number)
        self.analyse()
    elif (self.total[color][number] == 4):
        # Four copies: must consume a run AND a triplet together.
        if ((color in [4, 5]) or number + 2 > 9 or self.total[color][number + 1] == 0 or self.total[color][number + 2] == 0):
            self.back()
            return
        else:
            self.totalToResult_shun(color, number)
            self.totalToResult_ke(color, number)
            self.analyse()
    # Undo this level's tentative moves before returning to the caller.
    self.back()
def copyboard(B):
    """Return a new top-level list containing B's rows.

    NOTE(review): this is a shallow copy — the row objects themselves are
    shared with the original board; confirm callers never mutate rows of
    the copy expecting the original to stay unchanged.
    """
    return list(B)
def read(f):
    """Flatten an iterable of strings (e.g. an open file) into a flat list
    of its individual characters, newlines included."""
    return [char for line in f for char in line]
def duplicate_paths(self, paths):
    """Return a tuple holding a Duplicate() of each path object."""
    return tuple(path.Duplicate() for path in paths)
if copy[r][c] == '#' and sum_neighbours(copy, r, c) >= 4: if changed == False: changed = True return False elif copy[r][c] == '#' and sum_neighbours(copy, r, c) < 4: return True return False copy = [] while changed: copy = [] for i in range(len(layout)): copy.append([]) for j in range(len(layout[i])): copy[i].append(layout[i][j]) changed = False for i in range(len(copy)): for j in range(len(copy[i])): if not copy[i][j] == '.': if becomes_occupied(copy, i, j): layout[i][j] = '#' else: layout[i][j] = 'L' res = 0 for i in range(len(layout)): for j in range(len(layout[i])):
def get_schedules(data):
    """Enumerate all time-conflict-free itineraries over the given activities.

    data: dict with "size" (activity count) and "activities", a list of dicts
        carrying "code", "name", "category", "slots" (labels resolved through
        the project `enums.Weeks` enum — confirm label format), and "length".

    Returns {"success": True, "itineraries": [...], "size": n}, or a
    {"success": False, "error": ...} dict if the search exceeds the timeout.
    NOTE(review): assumes size >= 1 — activities[0] would raise for empty
    input; confirm callers guarantee this.
    """
    timeout = 15  # in seconds
    activities = {}
    itineraries = []
    size = data["size"]
    # Build a dict of activity data
    for i in range(size):
        activity = data["activities"][i]
        # Normalize slot labels like "X (Y)" into "X_Y" before enum lookup.
        activity["slots"] = [e.replace(" (", "_").replace(")", "") for e in activity["slots"]]
        activities[i] = {"code" : activity["code"], "name" : activity["name"], "category" : activity["category"], "slots" : [enums.Weeks[e].value for e in activity["slots"]], "length" : int(activity["length"])}
    # First activity
    first = activities[0]
    # Initialize itineraries with first activity; each entry is a list of
    # (activity index, occupied week numbers) pairs.
    for slot in first["slots"]:
        length = first["length"]
        weeks = [(slot + i) for i in range(length)]
        itineraries.append([(0, weeks)])
    # Prevent infinite looping
    t1 = datetime.now()
    # For each activity after the first
    for i in range(1, size):
        new_itineraries = []
        slots = activities[i]["slots"]
        length = activities[i]["length"]
        # For each itinerary already in the list
        for j in range(len(itineraries)):
            # For each slot in the current activity
            for slot in slots:
                weeks = [slot + k for k in range(length)]
                conflict = False
                # For each activity in the current itinerary
                for activity in itineraries[j]:
                    # For each week in the current activity's length
                    for week in weeks:
                        # Check for time conflict
                        if week in activity[1]:
                            conflict = True
                # Add activity if no conflict
                if not conflict:
                    copy = itineraries[j].copy()
                    copy.append((i, weeks))
                    new_itineraries.append(copy)
        # Check timeout
        # NOTE(review): timedelta.seconds ignores whole days — harmless for a
        # 15 s limit, but total_seconds() would be the safer reading.
        if (datetime.now() - t1).seconds > timeout:
            return {"success" : False, "error" : "Operation timed out."}
        # Only interested in up to date itineraries
        itineraries = new_itineraries
    ret_val = {"success": True, "itineraries" : []}
    # For each itinerary
    for itinerary in itineraries:
        new_itinerary = []
        # For each activity in the itinerary
        for activity in itinerary:
            # Set activity info
            new_activity = activities[activity[0]].copy()
            new_activity["slots"] = activity[1]
            new_itinerary.append(new_activity)
        # Add itinerary to return array
        ret_val["itineraries"].append(new_itinerary)
    ret_val["size"] = len(ret_val["itineraries"])
    return ret_val