def main(x, y):
    # Random start (s) and finish (f) times for the activities.
    s = utils.get_random_list(x, 0, y)
    f = utils.get_random_list(x, 0, y)
    assert len(s) == len(f)
    n = len(s)
    a = []
    k = 0
    # Greedy pass: keep activity m when it starts no earlier than the
    # finish time of the last activity kept.
    for m in range(1, n):
        if s[m] >= f[k]:
            a.append(m)
            k = m
    return a
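
Most of these drivers depend on a `utils.get_random_list` helper whose implementation is not shown. The sketch below is only an assumption inferred from positional calls such as `get_random_list(x, 0, y)`; other snippets use keyword forms (`length=`, `size=`, `max_int=`), so the real helpers likely differ from project to project.

import random


def get_random_list(size=10, min_int=0, max_int=100):
    # Hypothetical helper: a list of `size` random integers in [min_int, max_int].
    return [random.randint(min_int, max_int) for _ in range(size)]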
def main():
    """Main."""
    list_to_search = utils.get_random_list(length=100)
    print(list_to_search)
    print(find_most_frequent_int(list_to_search))
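
`find_most_frequent_int` is defined elsewhere. Assuming it should return the value that occurs most often in the list, a minimal stand-alone sketch could be:

from collections import Counter


def find_most_frequent_int(values):
    # Count occurrences and return the most common value (ties broken arbitrarily).
    value, _count = Counter(values).most_common(1)[0]
    return value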
def main():
    a = get_random_list()
    print(a)
    list_classes = (LinkedList, DoubleLinkedList)
    for list_class in list_classes:
        _list = list_class()
        for x in a:
            _list.insert(x)
        _list.print_list()

        # a[-1]/a[0] are treated as the list's first/last elements,
        # which assumes insert() prepends and so reverses the order.
        mid_element = a[len(a) // 2]
        first_element = a[-1]
        last_element = a[0]
        test_elements = (mid_element, first_element, last_element)
        for element in test_elements:
            print('Deleting %s...' % element)
            _list.delete(element)
            _list.print_list()

        print('Reversing list...')
        _list.reverse()
        _list.print_list()

        print('Re-reversing the list...')
        _list.reverse2()
        _list.print_list()
def main():
    """Main."""
    list_to_search = utils.get_random_list()
    left, right = find_sum_to_ten(list_to_search)
    print('{0} + {1} = {2}'.format(left, right, TARGET_INT))
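
`find_sum_to_ten` and `TARGET_INT` are not shown here. Assuming the goal is to find two values in the list that add up to `TARGET_INT`, a hash-set sketch (names and the constant value are assumptions) would be:

TARGET_INT = 10  # Assumed value; the real module defines its own constant.


def find_sum_to_ten(values):
    # Single pass with a set of values seen so far; returns the first matching pair.
    seen = set()
    for value in values:
        complement = TARGET_INT - value
        if complement in seen:
            return complement, value
        seen.add(value)
    return None, None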
def main():
    a = get_random_list()
    print(a)
    q = MyQueue()
    for x in a:
        q.add(x)
    q.print_queue()
def test_sort_stack():
    a = get_random_list(5)
    stack = Stack()
    for x in a:
        stack.push(x)
    stack.print_stack()
    sort_stack(stack)
    stack.print_stack()
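
`sort_stack` itself is not included in this snippet. The classic approach, sorting one stack with a single auxiliary stack, might look roughly like this; it assumes the `Stack` class exposes `push`, `pop`, `peek`, and `is_empty`, which is not confirmed by the test above.

def sort_stack(stack):
    # Move items into `temp` so that temp stays sorted with the largest
    # value on top, then pour everything back so the smallest ends up on top.
    temp = Stack()
    while not stack.is_empty():
        value = stack.pop()
        while not temp.is_empty() and temp.peek() > value:
            stack.push(temp.pop())
        temp.push(value)
    while not temp.is_empty():
        stack.push(temp.pop())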
def main():
    x = 20
    p = utils.get_random_list(x + 1)
    t1 = time.time()
    print("cut_rod: ", cut_rod(p, x))
    t2 = time.time()
    print("cut_rod_pd: ", cut_rod_pd(p, x, {}))
    t3 = time.time()
    print("cut_rod time: ", (t2 - t1))
    print("cut_rod_pd time: ", (t3 - t2))
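
The functions being timed are not shown; `cut_rod` is presumably the naive recursive rod-cutting solution and `cut_rod_pd` a memoized variant ('pd' likely for programação dinâmica, i.e. dynamic programming). A hedged sketch of both, assuming `p[i]` is the price of a rod piece of length `i`:

def cut_rod(p, n):
    # Naive recursion: try every length i for the first cut, O(2^n).
    if n == 0:
        return 0
    best = float('-inf')
    for i in range(1, n + 1):
        best = max(best, p[i] + cut_rod(p, n - i))
    return best


def cut_rod_pd(p, n, memo):
    # Same recursion with memoization keyed on the remaining length, O(n^2).
    if n == 0:
        return 0
    if n in memo:
        return memo[n]
    best = float('-inf')
    for i in range(1, n + 1):
        best = max(best, p[i] + cut_rod_pd(p, n - i, memo))
    memo[n] = best
    return best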
def test_set_of_stacks():
    a = get_random_list(13)
    print(a)
    stacks = SetOfStacks(limit=5)
    for x in a:
        stacks.push(x)
    stacks.print_stacks()
    print('Popping 5 values...')
    for _ in range(5):
        stacks.pop()
    stacks.print_stacks()
def main():
    a = get_random_list()
    h = MinHeap()

    # Test insertion into the heap.
    for x in a:
        h.insert(x, verbose=True)
    h.print_heap()

    # Test extraction from the heap.
    while h.peek():
        _ = h.extract_min(verbose=True)
    h.print_heap()

    h.heapify(a)
    h.print_heap()
def main():
    a = get_random_list()
    print(a)
    hash_classes = (
        SimpleHashTable,
        HashTable,
    )
    for hash_class in hash_classes:
        htable = hash_class()
        for x in a:
            index = htable._hash(x)
            print('Inserting %3s at index %2s.' % (x, index))
            htable.insert(x)
        htable.print_table()
def run_random_eval(self, k):
    precisions = []
    recalls = []
    aps = []
    miufs = []
    diversities = {'structural-1': [], 'structural-2': [], 'semantic': []}
    recs = {}
    for active_user in self.target_users:
        user_specific_note_contents = self.all_note_contents.loc[
            self.all_note_contents['NoteID'].isin(self.eval_nids_per_person[active_user])
        ]
        eval_list, already_read_list, gtp = self.filter_out_evaluation_list_for_pid(active_user)
        # print('----------------------------')
        eval_list = eval_list - already_read_list
        # print('%d / %d' % (len(eval_list), len(gtp)))
        # print('max precision@%d, %f' % (k, len(gtp) / k if len(gtp) / k < 1 else 1.0))
        # print('----------------------------')
        max_possible_recall = k / len(gtp) if len(gtp) > k else 1.0

        rec_set = set(utils.get_random_list(eval_list, k))
        precision = self.precision_at_k(rec_set, gtp)
        precisions.append(precision)
        recall = self.recall_at_k(rec_set, gtp)
        recalls.append(recall)
        ap = utils.apk(list(gtp), list(rec_set), k)
        aps.append(ap)
        miuf = self.mean_inverse_user_frequency(rec_set, self.eval_inters_per_person[active_user])
        miufs.append(miuf)
        sd = self.structural_diversity(rec_set)
        semd = self.semantic_diversity(rec_set)
        diversities['semantic'].append(semd)
        logging.info('Semantic diversity@%d: %.3f', k, semd)

        recs[active_user] = [rec_set, precision, recall, miuf, semd, sd, max_possible_recall]
    return recs
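
The metric helpers referenced above (`precision_at_k`, `recall_at_k`) are methods defined elsewhere in the class. Under the usual definitions against the ground-truth-positive set `gtp`, minimal stand-alone sketches (assumed, not the class's actual code) would be:

def precision_at_k(rec_set, gtp):
    # Fraction of recommended items that are relevant.
    return len(rec_set & set(gtp)) / len(rec_set) if rec_set else 0.0


def recall_at_k(rec_set, gtp):
    # Fraction of relevant items that were recommended.
    return len(rec_set & set(gtp)) / len(gtp) if gtp else 0.0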
def get_sample_graph(size=10):
    a = get_random_list(size)
    g = Graph()
    for x in a:
        node = GraphNode(val=x)
        g.add(node)
    nodes = g.get_nodes()
    for i, node in enumerate(nodes):
        # Add two random directed edges for each node.
        index_range = [x for x in range(len(nodes)) if x != i]
        first_index = random.choice(index_range)
        node.add_edge(nodes[first_index])

        index_range = [
            x for x in range(len(nodes))
            if x != i and x != first_index
        ]
        second_index = random.choice(index_range)
        node.add_edge(nodes[second_index])
    return g
import pesquisa_linear
import pesquisa_binaria
import insertion_sort
import merge_sort
import max_subarray
import max_subarray_dc
import utils

if __name__ == '__main__':
    x = 16750
    a = list(range(0, x))
    v = 8
    utils.report_time(pesquisa_linear, (a, v), n_iter=1)
    utils.report_time(pesquisa_binaria, (a, v), n_iter=1)

    a = utils.get_random_list(x)
    utils.report_time(insertion_sort, (a,), n_iter=1)
    utils.report_time(merge_sort, (a,), n_iter=1)

    a = utils.get_random_list(x, -x, x)
    utils.report_time(max_subarray, (a,), n_iter=1)
    utils.report_time(max_subarray_dc, (a,), n_iter=1)
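
`utils.report_time` is used throughout these benchmark scripts but its implementation is not shown. A plausible sketch is below; the call convention is an assumption, since the scripts pass whole modules as the first argument and the real helper presumably resolves each module's entry-point function before timing it.

import time


def report_time(func, args, n_iter=1):
    # Hypothetical timing helper: run `func(*args)` n_iter times and
    # print the average wall-clock time per run.
    start = time.time()
    for _ in range(n_iter):
        func(*args)
    elapsed = (time.time() - start) / n_iter
    print('%s: %.6f s' % (getattr(func, '__name__', str(func)), elapsed))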
import bellman_ford
import cut_rod
import dijkstra
import selecao_atividades
import huffman
import kruskal
import mochila
import prim
import utils

if __name__ == '__main__':
    for n in [50, 100, 500, 1000, 5000, 10000, 15000]:
        print(n)
        WG = utils.WeightedGraph(n, 20, 5)
        utils.report_time(kruskal, [WG], n_iter=1)
        utils.report_time(prim, [WG], n_iter=1)
        print()

        edges = utils.gen_edges(n, 20, 5)
        utils.report_time(bellman_ford, [edges, 'A'], n_iter=1)
        utils.report_time(dijkstra, [edges, 'A', 'E'], n_iter=1)
        print()

        utils.report_time(selecao_atividades, [n, 10], n_iter=1)
        utils.report_time(huffman, [n], n_iter=1)
        print()

        values = utils.get_random_list(n)
        weights = utils.get_random_list(n)
        utils.report_time(cut_rod, [values, 10], n_iter=1)
        utils.report_time(mochila, [values, weights, n, n], n_iter=1)
        print()
        if not is_valid_line(row) or not is_valid_line(column):
            return False

    # Check each 3x3 sub-square.
    for i in range(0, n, 3):
        for j in range(0, n, 3):
            line = [
                board[r][s]
                for r in range(i, i + 3)
                for s in range(j, j + 3)
            ]
            if not is_valid_line(line):
                return False
    return True


if __name__ == '__main__':
    A = get_random_list(size=8, max_int=6)
    i = 2
    print(A, A[i])
    A_part = dutch_flag_partition1(A, i)
    print(A_part)

    A = get_random_list(size=8, max_int=6)
    print(A, A[i])
    dutch_flag_partition2(A, i)
    print(A)

    A = get_random_list(size=8, max_int=6)
    print(A, A[i])
    dutch_flag_partition3(A, i)
    print(A)
    print('\n')
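
The three `dutch_flag_partition*` variants exercised above are defined elsewhere. The single-pass, in-place version of the Dutch national flag partition around the pivot `A[i]` typically looks like the sketch below; it illustrates the general technique and is not necessarily the code behind any of the numbered variants.

def dutch_flag_partition(A, pivot_index):
    # Rearrange A in place: values smaller than the pivot first,
    # then values equal to it, then values greater than it.
    pivot = A[pivot_index]
    smaller, equal, larger = 0, 0, len(A)
    while equal < larger:
        if A[equal] < pivot:
            A[smaller], A[equal] = A[equal], A[smaller]
            smaller += 1
            equal += 1
        elif A[equal] == pivot:
            equal += 1
        else:
            larger -= 1
            A[equal], A[larger] = A[larger], A[equal]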