def standard(pipe):
    print("Pipeline:", type(pipe.named_steps["model"]).__name__)
    X_train, X_test, y_train, y_test = train_test_split(X, y)

    def fit():
        pipe.fit(X_train, y_train)

    util.measure(fit)
    predicted = pipe.predict(X_test)
    score = np.mean(predicted == y_test)
    report = metrics.classification_report(y_test, predicted)
    write_results(score, report)
    print(score)
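# write_results is called above but not defined in this snippet. A minimal
# sketch, assuming it appends the accuracy and the classification report to a
# plain-text log (the path and format are assumptions):
def write_results(score, report, path="results.txt"):
    with open(path, "a") as f:
        f.write("accuracy: %f\n%s\n" % (score, report))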
def getAverageNodeRound(func, *args, rounds=MAX_ROUND):
    elapsed = 0
    # log.debug(args)
    for i in range(rounds):
        for j in range(NUM_NODE):
            elapsed += measure(func, nodes[j], *args)
    return elapsed / (rounds * NUM_NODE)
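# measure() is used throughout these node benchmarks but not defined in this
# snippet. A minimal sketch, assuming it times a single call and returns the
# elapsed seconds (the backup code further down appears to use a variant that
# also returns the call's result):
import time

def measure(func, *args, **kwargs):
    start = time.perf_counter()
    func(*args, **kwargs)                  # run the measured call once
    return time.perf_counter() - start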
def __lvm(self):
    vg = self.args.vg[0]
    lv = self.args.lv[0]
    (out, t) = measure(lvm_backup, vg, lv, self.args.out[0],
                       verbose=self.args.verbose)
    _print_backup_result(f"{vg}/{lv}", out, t)
def insertionTest():
    log.info("Insertion Test:")
    total = 0
    for i in range(NUM_NODE):
        data = [re.sub(r'\s+', ' ', line)
                for line in open(Path(datadir).joinpath('test' + str(i) + '.txt'))]
        elapsed = measure(baseline.insert, nodes[i], data)
        total += elapsed
        log.info('Node %d Insertion time: %f' % (i, elapsed))
    log.info("total insertion time: %f" % total)
    log.info("average insertion time: %f" % (total / NUM_NODE))
    output_json['insertion'] = total / NUM_NODE
def gridding(model, X, y):
    # Grid Search
    parameters = {
        'model__n_iter': [1000],   # number of epochs
        'model__alpha': [1e-4],    # learning rate
        'model__loss': ['hinge'],  # hinge loss; 'log' would give logistic regression
        'model__penalty': ['l2', 'elasticnet'],
        #'model__n_jobs': [-1]
    }
    grid = GridSearchCV(model, parameters, cv=10, scoring="accuracy", n_jobs=-1)
    #paramGrid = ParameterGrid(parameters)
    #X_train, X_test, y_train, y_test = train_test_split(X, y)
    #bestModel, bestScore, allModels, allScores = pf.bestFit(model, paramGrid,
    #    X_train, y_train, X_test, y_test, metric=accuracy_score,
    #    bestScore='max', scoreLabel='Accuracy')
    #print(bestModel, bestScore)

    # Fitting
    print("Fitting")

    def fit():
        grid.fit(X, y)

    util.measure(fit)
    print("Best Score", grid.best_score_)
    print("Best Params", grid.best_params_)
    print(grid.predict([
        "Fat people should eat less",
        "really hurts but can be fixed yo pain"
    ]))
    return grid
def __backup(self):
    """Runs all backups specified in config file."""
    with self.args.config[0].open("r") as f:
        conf = json.load(f)
    self.__backup_validate_conf(conf)
    results = []
    for device in conf["devices"]:
        results.append(
            measure(
                lvm_backup,
                device["vol-group"],
                device["name"],
                Path(conf["output_path"]),
                verbose=self.args.verbose,
            ))
    for ((out, t), device) in zip(results, conf["devices"]):
        _print_backup_result(f"{device['vol-group']}/{device['name']}", out, t)
def mem_empty():
    cupy.cuda.alloc(0)

def mem_1K():
    cupy.cuda.alloc(1024)

def mem_1M():
    cupy.cuda.alloc(1024 * 1024)

cnt = 1000
util.measure(dummy_call, "dummy_call", cnt)
util.measure(mem_empty, "mem_0B", cnt)
util.measure(mem_1K, "mem_1K", cnt)
util.measure(mem_1M, "mem_1M", cnt)

sizes = [(0, "0B"), (1024, "1K"), (1024 * 1024, "1M")]
for xp in [cupy, numpy]:
    if xp is cupy:
        str = "cp"
    else:
        str = "np"
    if xp is cupy:
        for size, s in sizes:
            memptr = cupy.cuda.alloc(size)

            def alloc():
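# util.measure(func, name, count) is used by the CuPy benchmarks here but its
# definition is not shown. A minimal sketch, assuming it runs func `count`
# times, synchronizes the device so GPU work is actually included, and prints
# the mean time per call (the real util module may differ):
import time
import cupy

def measure(func, name, count=1):
    cupy.cuda.Stream.null.synchronize()    # drain pending GPU work before timing
    start = time.perf_counter()
    for _ in range(count):
        func()
    cupy.cuda.Stream.null.synchronize()    # wait for the measured kernels to finish
    elapsed = time.perf_counter() - start
    print("%s: %f sec/call" % (name, elapsed / count))

def dummy_call():
    pass    # empty baseline used above to estimate bare call overhead (an assumption)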
    sys.exit(0)
elif action == 'parse':
    scanned_list = project.scan()
    files = project.get_files_from_db(dbname)
    pp.pprint(files)
    if files:
        scanned_list = project.scan_modified(scanned_list, files)
    logging.debug('scanned_list: {}'.format([x[0] for x in scanned_list]))
    parsed_dict = project.parse_all(scanned_list)
    pp.pprint('Parsed {} in {} with keys'.format(len(parsed_dict), builddir))
    pp.pprint(parsed_dict.keys())
elif action == 'parse_single':
    scanned_list = project.scan()
    files = project.get_files_from_db(dbname)
    if files:
        scanned_list = project.scan_modified(scanned_list, files)
    parsed_dict = project.parse_all_single(scanned_list)
    pp.pprint('Parsed {} in {} with keys'.format(len(parsed_dict), builddir))
    pp.pprint(parsed_dict.keys())
    storage = Storage(dbname, writeback=True)
    storage_update = util.measure(storage.update)
    merge_recurse_inplace(storage, parsed_dict, Storage)
    storage.close()
import numpy
import cupy
import util

for xp in [cupy, numpy]:
    for size in [2 ** i for i in range(10, 16, 5)]:
        st = xp.random.RandomState()
        st.seed(0)
        str = "cupy" if xp is cupy else "numpy"

        def f():
            st.beta(2, 2, size=(size,))
        util.measure(f, "beta_%s , %5d" % (str, size), 5)

        def f():
            st.binomial(10, 0.5, size=(size,))
        util.measure(f, "binomial_%s, %5d" % (str, size), 5)

        def f():
            st.lognormal(size=(size,))
        util.measure(f, "lognormal%s, %5d" % (str, size), 5)

        def f():
            st.normal(size=(size,))
        util.measure(f, "normal%s , %5d" % (str, size), 5)
@cupy.fuse()
def saxpy_fuse(a, x, y):
    return a * x + y

for xp in [cupy, numpy]:
    for size in [2 ** i for i in range(20)]:
        if xp is numpy and size > 2 ** 13:
            continue
        a = numpy.float32(2.0)
        x = xp.ones((1024, size), 'f')
        y = xp.ones((1024, size), 'f')

        def f():
            saxpy(a, x, y)
        str = "cp" if xp is cupy else "np"
        util.measure(f, "saxpy_%s , %8d" % (str, size))

xp = cupy
for size in [2 ** i for i in range(20)]:
    a = numpy.float32(2.0)
    x = xp.ones((1024, size), 'f')
    y = xp.ones((1024, size), 'f')

    def f():
        saxpy_fuse(a, x, y)
    util.measure(f, "saxpy_fuse, %8d" % (size))
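# The unfused baseline `saxpy` is called above but not shown in this snippet.
# A minimal sketch, assuming it is the plain elementwise expression that the
# fused kernel above collapses into a single launch:
def saxpy(a, x, y):
    return a * x + y    # evaluated as separate multiply and add kernels, no fusion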
import numpy
import cupy
import util

for xp in [cupy, numpy]:
    for size in [2**i for i in range(11)]:
        a = xp.zeros((size, 32, 32), dtype='f')
        b = xp.zeros((size, 32, 32), dtype='f')

        def f():
            xp.matmul(a, b)
        str = "cp" if xp is cupy else "np"
        util.measure(f, "matmul_%s, %5d" % (str, size), 5)
    buf = []
    for i, s in enumerate(sizes):
        buf.append(cupy.cuda.alloc(s))
        if i % 10 == 0:
            buf[i // 10] = None

def f3():
    [cupy.empty((s,), dtype='b') for s in sizes]

def f4():
    buf = []
    for i, s in enumerate(sizes):
        buf.append(cupy.empty((s,), dtype='b'))
        if i % 10 == 0:
            buf[i // 10] = None

util.measure(f1, "alloc         ")
cupy.get_default_memory_pool().free_all_blocks()
gc.collect()
util.measure(f2, "alloc_and_free")
cupy.get_default_memory_pool().free_all_blocks()
gc.collect()
util.measure(f3, "empty         ")
cupy.get_default_memory_pool().free_all_blocks()
gc.collect()
util.measure(f4, "empty_and_free")
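# f1 and f2 are measured above but their definitions are not fully shown in
# this fragment; the indented block at the top appears to be the tail of one
# of them. A minimal sketch of f1, assuming it mirrors f3 but goes through the
# raw pool allocator instead of cupy.empty (name and body are assumptions):
def f1():
    [cupy.cuda.alloc(s) for s in sizes]    # allocate each size once; the pool reclaims the blocks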
def call_adam_fuse(grad, data, state_m, state_v):
    adam_fuse(grad,
              numpy.float32(hp.lr),
              numpy.float32(1 - hp.beta1),
              numpy.float32(1 - hp.beta2),
              numpy.float32(hp.eps),
              numpy.float32(hp.eta),
              numpy.float32(hp.weight_decay_rate),
              data, state_m, state_v)

sizes = [1, 10, 100, 1000, 2000, 5000]
for size in sizes:
    zero = cupy.zeros((size, size))

    def f():
        call_adam(zero, zero, zero, zero)
    util.measure(f, "adam     , %4d" % (size), 100)

for size in sizes:
    zero = cupy.zeros((size, size))

    def f():
        call_adam_fuse(zero, zero, zero, zero)
    util.measure(f, "adam_fuse, %4d" % (size), 100)

for size in sizes[:4]:
    zero = numpy.zeros((size, size))

    def f():
        call_adam_fuse(zero, zero, zero, zero)
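# call_adam, the unfused baseline timed above, is not shown here. A minimal
# sketch, assuming it applies a Chainer-style Adam update with plain array
# operations, one kernel launch per expression (hp and the exact update rule
# are assumptions inferred from the arguments passed to adam_fuse):
def call_adam(grad, data, state_m, state_v):
    xp = cupy.get_array_module(grad)
    state_m += (1 - hp.beta1) * (grad - state_m)           # first-moment update
    state_v += (1 - hp.beta2) * (grad * grad - state_v)    # second-moment update
    data -= hp.eta * (hp.lr * state_m / (xp.sqrt(state_v) + hp.eps)
                      + hp.weight_decay_rate * data)       # parameter step with weight decay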
for xp in [cupy, numpy]:
    for trans in [False, True]:
        for size in [2**i for i in range(16)]:
            if trans:
                a = xp.ones((32, size), 'f').T
            else:
                a = xp.ones((size, 32), 'f')
            b = xp.ones((size, 32), 'f')

            def f():
                a + b
            head = "t" if trans else "n"
            str = "cp" if xp is cupy else "np"
            util.measure(f, "%s_add_%s, %5d" % (head, str, size))

for xp in [cupy, numpy]:
    for trans in [False, True]:
        for size in [2**i for i in range(16)]:
            if trans:
                a = xp.ones((32, size), 'f').T
            else:
                a = xp.ones((size, 32), 'f')
            b = xp.ones((size, 32), 'f')

            def f():
                a**b
            head = "t" if trans else "n"
            str = "cp" if xp is cupy else "np"
parser.add_argument('--file-prefix', type=str, default="./tmp",
                    help="Prefix for the directory to use to store temp files")
args = parser.parse_args()

print("***Start***")
print(json.dumps(vars(args)))

size = args.size
order = args.order
interval = args.interval
num_steps = args.num_steps
checkpoint = args.checkpoint
write_files = args.write_files
file_prefix = args.file_prefix

sym = np.zeros((size,))
fwd_op = DevitoOperator((70, 70), order)
wrp = Checkpointer(fwd_op, None, 10000, num_steps,
                   DummyCheckpoint({'sym': sym}), DummyCheckpoint({'sym': sym}),
                   interval=interval, nrevcp=4,
                   file_prefix=file_prefix, write_files=write_files)

args = []
kwargs = {}
if checkpoint:
    testing_callable = wrp.apply_forward
    args = [sym]
else:
    testing_callable = fwd_op.apply
    kwargs = {'t_start': 0, 't_end': num_steps}

timings = measure(testing_callable, *args, **kwargs)
print(timings)
print(min(timings))