def sparse_sketch_rules():
    """Return the custom sketch rules used for sparse ops."""
    # One PreloadCustomSketchRule per sparse operator: each pairs a
    # meet-condition callback with an apply callback under a rule name.
    return [
        auto_scheduler.PreloadCustomSketchRule(
            sparse_conv2d_meet_condition_func, sparse_conv2d_apply_func, "SparseConv2D"
        ),
        auto_scheduler.PreloadCustomSketchRule(
            sparse_dense_meet_condition_func, sparse_dense_apply_func, "SparseDense"
        ),
        # Add more sketch rules for sparse
    ]
def test_sketch_search_policy_custom_sketch():
    """Check that the sketch search policy honors a user-supplied custom sketch rule."""

    def always_apply(search_policy, state, stage_id):
        # Use the custom rule on every stage and skip all built-in rules.
        return auto_scheduler.PreloadCustomSketchRule.APPLY_AND_SKIP_REST

    def emit_sketches(search_policy, state, stage_id):
        # Wrap the raw state so its loop structure can be manipulated.
        wrapped = auto_scheduler.loop_state.State(
            state, search_policy.search_task.compute_dag
        )
        out_op = wrapped.stage_ops[2]
        # Candidate 1: the unmodified state.
        candidates = [[wrapped.state_object, -1]]
        # Candidate 2: the same state with its outermost iterator split by 8.
        split_state = wrapped.copy()
        outer, _, _ = split_state[out_op].iters
        split_state.split(out_op, outer, [8])
        candidates.append([split_state.state_object, -1])
        return candidates

    search_common(
        cost_model=auto_scheduler.XGBModel(),
        init_search_callbacks=[
            auto_scheduler.PreloadCustomSketchRule(always_apply, emit_sketches)
        ],
    )
def test_cpu_custom_sketch():
    """Check sketch generation on CPU with a custom rule that splits the outer loop by [8, 2]."""

    def always_apply(search_policy, state, stage_id):
        # Use the custom rule on every stage and skip all built-in rules.
        return auto_scheduler.PreloadCustomSketchRule.APPLY_AND_SKIP_REST

    def emit_sketches(search_policy, state, stage_id):
        # Wrap the raw state so its loop structure can be manipulated.
        wrapped = auto_scheduler.loop_state.State(
            state, search_policy.search_task.compute_dag
        )
        out_op = wrapped.stage_ops[2]
        # Candidate 1: the unmodified state.
        candidates = [[wrapped.state_object, -1]]
        # Candidate 2: the same state with its outermost iterator split by [8, 2].
        split_state = wrapped.copy()
        outer, _, _ = split_state[out_op].iters
        split_state.split(out_op, outer, [8, 2])
        candidates.append([split_state.state_object, -1])
        return candidates

    sketches = generate_sketches(
        matmul_auto_scheduler_test,
        (512, 512, 512),
        "llvm",
        init_search_callbacks=[
            auto_scheduler.PreloadCustomSketchRule(always_apply, emit_sketches)
        ],
    )
    assert len(sketches) == 2
    # Sketch 0 is the untouched 512 x 512 x 512 loop nest.
    for axis, extent in enumerate([512, 512, 512]):
        assert sketches[0].stages[2].iters[axis].range.extent == extent
    # Sketch 1 has the outer 512 loop split into 32 x 8 x 2.
    for axis, extent in enumerate([32, 8, 2, 512, 512]):
        assert sketches[1].stages[2].iters[axis].range.extent == extent
# * see :any:`auto_scheduler.TuningOptions` for more parameters # * Here, we need to create a :code:`auto_scheduler.SketchPolicy` object, and add the custom sketch # rule as a `init_search_callbacks`. log_file = "sparse_dense.json" tune_option = auto_scheduler.TuningOptions( num_measure_trials=10, measure_callbacks=[auto_scheduler.RecordToFile(log_file)], verbose=2, ) search_policy = auto_scheduler.SketchPolicy( task, program_cost_model=auto_scheduler.XGBModel(), init_search_callbacks=[ auto_scheduler.PreloadCustomSketchRule(meet_condition_func, apply_func, "SparseDense") ], ) ###################################################################### # Run the search # ^^^^^^^^^^^^^^ # Now we get all inputs ready. # We can kick off the search and let the auto-scheduler do its magic. # After some measurement trials, we can load the best schedule from the log # file and apply it. # Run auto-tuning (search) # Notice: We do not run the tuning in our webpage server since it takes too long. # Uncomment the following line to run it by yourself. task.tune(tune_option, search_policy)