diff --git a/apps/benchmark/adreno/adreno_gpu_bench_clml.py b/apps/benchmark/adreno/adreno_gpu_bench_clml.py index 17c483fe2c76..ee2cd82df8e7 100755 --- a/apps/benchmark/adreno/adreno_gpu_bench_clml.py +++ b/apps/benchmark/adreno/adreno_gpu_bench_clml.py @@ -116,6 +116,7 @@ def print_progress(msg): def tune_tasks( tasks, measure_option, + tuner="xgb", n_trial=1024, early_stopping=None, log_filename="tuning.log", @@ -127,7 +128,40 @@ def tune_tasks( for i, tsk in enumerate(reversed(tasks)): print("Task: ", tsk) prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) - tuner_obj = XGBTuner(tsk, loss_type="rank") + + # create tuner + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") + elif tuner == "ga": + tuner_obj = GATuner(tsk, pop_size=50) + elif tuner == "random": + tuner_obj = RandomTuner(tsk) + elif tuner == "gridsearch": + tuner_obj = GridSearchTuner(tsk) + else: + 
raise ValueError("Invalid tuner: " + tuner) tsk_trial = min(n_trial, len(tsk.config_space)) tuner_obj.tune( diff --git a/apps/benchmark/adreno/adreno_gpu_bench_texture.py b/apps/benchmark/adreno/adreno_gpu_bench_texture.py index 2228cda31a39..bf2c69f61ed7 100755 --- a/apps/benchmark/adreno/adreno_gpu_bench_texture.py +++ b/apps/benchmark/adreno/adreno_gpu_bench_texture.py @@ -115,6 +115,7 @@ def print_progress(msg): def tune_tasks( tasks, measure_option, + tuner="xgb", n_trial=1024, early_stopping=None, log_filename="tuning.log", @@ -126,7 +127,40 @@ def tune_tasks( for i, tsk in enumerate(reversed(tasks)): print("Task: ", tsk) prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) - tuner_obj = XGBTuner(tsk, loss_type="rank") + + # create tuner + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") + elif tuner == "ga": + tuner_obj = GATuner(tsk, pop_size=50) + elif 
tuner == "random": + tuner_obj = RandomTuner(tsk) + elif tuner == "gridsearch": + tuner_obj = GridSearchTuner(tsk) + else: + raise ValueError("Invalid tuner: " + tuner) tsk_trial = min(n_trial, len(tsk.config_space)) tuner_obj.tune( diff --git a/gallery/how_to/deploy_models/deploy_model_on_adreno.py b/gallery/how_to/deploy_models/deploy_model_on_adreno.py index c2ba189a6715..d483fe49b096 100644 --- a/gallery/how_to/deploy_models/deploy_model_on_adreno.py +++ b/gallery/how_to/deploy_models/deploy_model_on_adreno.py @@ -323,7 +323,43 @@ for i, tsk in enumerate(reversed(tasks[:3])): print("Task:", tsk) prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) - tuner_obj = XGBTuner(tsk, loss_type="rank") + + # choose tuner + tuner = "xgb" + + # create tuner + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") + elif tuner == "ga": + tuner_obj = GATuner(tsk, pop_size=50) + 
elif tuner == "random": + tuner_obj = RandomTuner(tsk) + elif tuner == "gridsearch": + tuner_obj = GridSearchTuner(tsk) + else: + raise ValueError("Invalid tuner: " + tuner) tsk_trial = min(n_trial, len(tsk.config_space)) tuner_obj.tune( diff --git a/gallery/how_to/tune_with_autotvm/tune_relay_arm.py b/gallery/how_to/tune_with_autotvm/tune_relay_arm.py index 0cb02c036fd7..3e5d7551e82a 100644 --- a/gallery/how_to/tune_with_autotvm/tune_relay_arm.py +++ b/gallery/how_to/tune_with_autotvm/tune_relay_arm.py @@ -275,14 +275,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": - tuner_obj = XGBTuner(tsk, loss_type="rank") + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") elif tuner == "xgb_knob": - tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") elif tuner == "xgb_itervar": - tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", 
feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random": diff --git a/gallery/how_to/tune_with_autotvm/tune_relay_cuda.py b/gallery/how_to/tune_with_autotvm/tune_relay_cuda.py index ee0a83ab8eb8..47ea99884df8 100644 --- a/gallery/how_to/tune_with_autotvm/tune_relay_cuda.py +++ b/gallery/how_to/tune_with_autotvm/tune_relay_cuda.py @@ -185,8 +185,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=100) elif tuner == "random": diff --git a/gallery/how_to/tune_with_autotvm/tune_relay_mobile_gpu.py b/gallery/how_to/tune_with_autotvm/tune_relay_mobile_gpu.py index dd0a3a9837ac..09bc04683616 100644 --- 
a/gallery/how_to/tune_with_autotvm/tune_relay_mobile_gpu.py +++ b/gallery/how_to/tune_with_autotvm/tune_relay_mobile_gpu.py @@ -272,8 +272,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random": diff --git a/gallery/how_to/tune_with_autotvm/tune_relay_x86.py b/gallery/how_to/tune_with_autotvm/tune_relay_x86.py index a44c30bb89f9..a637f7222e70 100644 --- a/gallery/how_to/tune_with_autotvm/tune_relay_x86.py +++ b/gallery/how_to/tune_with_autotvm/tune_relay_x86.py @@ -154,8 +154,30 @@ def tune_kernels( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": + if tuner == "xgb": + tuner_obj = XGBTuner(task, 
loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(task, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(task, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(task, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": tuner_obj = XGBTuner(task, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(task, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(task, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(task, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(task, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(task, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(task, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(task, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(task, pop_size=50) elif tuner == "random": diff --git a/gallery/tutorial/autotvm_relay_x86.py b/gallery/tutorial/autotvm_relay_x86.py index ef8fa4a113c3..4e2dc0591eb7 100644 --- a/gallery/tutorial/autotvm_relay_x86.py +++ b/gallery/tutorial/autotvm_relay_x86.py @@ -355,7 +355,44 @@ # Tune the extracted tasks sequentially. 
for i, task in enumerate(tasks): prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) - tuner_obj = XGBTuner(task, loss_type="rank") + + # choose tuner + tuner = "xgb" + + # create tuner + if tuner == "xgb": + tuner_obj = XGBTuner(task, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(task, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(task, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(task, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(task, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(task, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(task, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(task, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(task, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(task, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(task, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(task, loss_type="rank-binary", feature_type="curve") + elif tuner == "ga": + tuner_obj = GATuner(task, pop_size=50) + elif tuner == "random": + tuner_obj = RandomTuner(task) + elif tuner == "gridsearch": + tuner_obj = GridSearchTuner(task) + else: + raise ValueError("Invalid tuner: " + tuner) + tuner_obj.tune( n_trial=min(tuning_option["trials"], len(task.config_space)), early_stopping=tuning_option["early_stopping"], diff --git a/python/tvm/autotvm/testing/tune_relay.py b/python/tvm/autotvm/testing/tune_relay.py index 743127ec1ded..7de38e44c0d1 100644 --- a/python/tvm/autotvm/testing/tune_relay.py +++ b/python/tvm/autotvm/testing/tune_relay.py @@ -212,7 +212,7 @@ def 
main(): if ARGS.num_trials > 0: for i, task in enumerate(tasks): prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) - tuner_obj = XGBTuner(task, loss_type="rank") + tuner_obj = XGBTuner(task, loss_type="reg") n_trial = min(len(task.config_space), ARGS.num_trials) tuner_obj.tune( n_trial=n_trial, diff --git a/python/tvm/autotvm/tuner/xgboost_cost_model.py b/python/tvm/autotvm/tuner/xgboost_cost_model.py index a80c35090324..d5876258463b 100644 --- a/python/tvm/autotvm/tuner/xgboost_cost_model.py +++ b/python/tvm/autotvm/tuner/xgboost_cost_model.py @@ -71,6 +71,8 @@ class XGBoostCostModel(CostModel): The cost model predicts the normalized flops. If is 'rank', use pairwise rank loss to train cost model. The cost model predicts relative rank score. + If is 'rank-binary', use pairwise rank loss with binarized labels to train cost model. + The cost model predicts relative rank score. num_threads: int, optional The number of threads. log_interval: int, optional @@ -80,7 +82,13 @@ class XGBoostCostModel(CostModel): """ def __init__( - self, task, feature_type, loss_type, num_threads=None, log_interval=25, upper_model=None + self, + task, + feature_type, + loss_type="reg", + num_threads=None, + log_interval=25, + upper_model=None, ): global xgb super(XGBoostCostModel, self).__init__() @@ -103,6 +111,8 @@ def __init__( self.num_threads = num_threads self.log_interval = log_interval + self.loss_type = loss_type + if loss_type == "reg": self.xgb_params = { "max_depth": 3, @@ -114,7 +124,7 @@ def __init__( "alpha": 0, "objective": "reg:linear", } - elif loss_type == "rank": + elif loss_type in ("rank", "rank-binary"): self.xgb_params = { "max_depth": 3, "gamma": 0.0001, @@ -217,6 +227,7 @@ def fit(self, xs, ys, plan_size): xgb_average_recalln_curve_score(plan_size), ], verbose_eval=self.log_interval, + loss_type=self.loss_type, ) ], ) @@ -301,6 +312,7 @@ def fit_log(self, records, plan_size, min_seed_records=500): xgb_average_recalln_curve_score(plan_size), ], 
verbose_eval=self.log_interval, + loss_type=self.loss_type, ) ], ) @@ -453,6 +465,20 @@ def _extract_curve_feature_log(arg): return x, y +def _binarize_evals(evals): + """binarize evaluation labels""" + bin_evals = [] + for evalset in evals: + # binarize labels in xgb.dmatrix copy + barray = evalset[0].get_data().copy() + blabel = evalset[0].get_label().copy() + blabel[blabel < 0.5] = 0.0 + blabel[blabel >= 0.5] = 1.0 + # pylint: disable=R1721 + bin_evals.append(tuple([xgb.DMatrix(barray, blabel)] + [e for e in evalset[1:]])) + return bin_evals + + class XGBoostCallback(TrainingCallback): """Base class for XGBoost callbacks.""" @@ -475,6 +501,7 @@ def __init__( stopping_rounds, metric, fevals, + loss_type="reg", evals=(), log_file=None, maximize=False, @@ -490,6 +517,7 @@ def __init__( self.log_file = log_file self.maximize = maximize self.verbose_eval = verbose_eval + self.loss_type = loss_type self.skip_every = skip_every self.state = {} @@ -533,8 +561,19 @@ def _fmt_metric(value, show_stdv=True): return False ##### evaluation ##### + mod_evals = self.evals + if self.loss_type == "rank-binary": + mod_evals = _binarize_evals(self.evals) + + if self.loss_type == "rank" and int(xgb.__version__[0]) >= 2: + # since xgboost pr#8931 + raise RuntimeError( + "Use 'rank-binary' instead of 'rank' loss_type with xgboost %s >= 2.0.0" + % xgb.__version__ + ) + for feval in self.fevals: - bst_eval = model.eval_set(self.evals, epoch, feval) + bst_eval = model.eval_set(mod_evals, epoch, feval) res = [x.split(":") for x in bst_eval.split()] for kv in res[1:]: res_dict[kv[0]] = [float(kv[1])] diff --git a/python/tvm/autotvm/tuner/xgboost_tuner.py b/python/tvm/autotvm/tuner/xgboost_tuner.py index 9dec54c2d5f7..0e77bf674bac 100644 --- a/python/tvm/autotvm/tuner/xgboost_tuner.py +++ b/python/tvm/autotvm/tuner/xgboost_tuner.py @@ -53,6 +53,8 @@ class XGBTuner(ModelBasedTuner): The cost model predicts the normalized flops. If is 'rank', use pairwise rank loss to train cost model. 
The cost model predicts relative rank score. + If is 'rank-binary', use pairwise rank loss with binarized labels to train cost model. + The cost model predicts relative rank score. num_threads: int, optional The number of threads. @@ -77,7 +79,7 @@ def __init__( task, plan_size=64, feature_type="itervar", - loss_type="rank", + loss_type="reg", num_threads=None, optimizer="sa", diversity_filter_ratio=None, diff --git a/python/tvm/contrib/torch/pytorch_tvm.py b/python/tvm/contrib/torch/pytorch_tvm.py index ffab4fa0d246..30b0dd4f8c0e 100644 --- a/python/tvm/contrib/torch/pytorch_tvm.py +++ b/python/tvm/contrib/torch/pytorch_tvm.py @@ -49,8 +49,30 @@ def tune_tasks( prefix = f"[Task {i + 1:2d}/{len(tasks):2d}] " # create tuner - if tuner in ("xgb", "sgb-rank"): + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, 
pop_size=100) elif tuner == "random": diff --git a/python/tvm/driver/tvmc/autotuner.py b/python/tvm/driver/tvmc/autotuner.py index b9d0e3558286..82b5cc1598fd 100644 --- a/python/tvm/driver/tvmc/autotuner.py +++ b/python/tvm/driver/tvmc/autotuner.py @@ -210,7 +210,23 @@ def add_tune_parser(subparsers, _, json_params): ) autotvm_group.add_argument( "--tuner", - choices=["ga", "gridsearch", "random", "xgb", "xgb_knob", "xgb-rank"], + choices=[ + "ga", + "gridsearch", + "random", + "xgb", + "xgb_knob", + "xgb_itervar", + "xgb_curve", + "xgb_rank", + "xgb_rank_knob", + "xgb_rank_itervar", + "xgb_rank_curve", + "xgb_rank_binary", + "xgb_rank_binary_knob", + "xgb_rank_binary_itervar", + "xgb_rank_binary_curve", + ], default="xgb", help="type of tuner to use when tuning with autotvm.", ) @@ -449,7 +465,9 @@ def tune_model( trials. tuner : str, optional The type of tuner to use when tuning with autotvm. Can be one of - "ga", "gridsearch", "random", "xgb", "xgb_knob", and "xgb-rank". + "ga", "gridsearch", "random", "xgb", "xgb_knob", "xgb_itervar", "xgb_curve", + "xgb_rank", "xgb_rank_knob", "xgb_rank_itervar", "xgb_rank_curve", "xgb_rank_binary", "xgb_rank_binary_knob", + "xgb_rank_binary_itervar" and "xgb_rank_binary_curve". min_repeat_ms : int, optional Minimum time to run each trial. Defaults to 0 on x86 and 1000 on other targets. 
early_stopping : int, optional @@ -795,10 +813,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # Create a tuner - if tuner in ("xgb", "xgb-rank"): - tuner_obj = XGBTuner(tsk, loss_type="rank") + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random": diff --git a/tests/python/contrib/test_clml/test_adreno_collage_targets.py b/tests/python/contrib/test_clml/test_adreno_collage_targets.py index d08b76c3b582..4cf86a0e058d 100644 --- a/tests/python/contrib/test_clml/test_adreno_collage_targets.py +++ b/tests/python/contrib/test_clml/test_adreno_collage_targets.py @@ -137,7 +137,7 @@ def tune_autotvm_tasks(tasks, log_filename): continue logging.info(f"Using autotvm to tune {task.name}") - tuner_obj = tvm.autotvm.tuner.XGBTuner(task, loss_type="rank") + tuner_obj = 
tvm.autotvm.tuner.XGBTuner(task, loss_type="reg") if os.path.exists(tmp_log_filename): tuner_obj.load_history(tvm.autotvm.record.load_from_file(tmp_log_filename)) diff --git a/tests/python/contrib/test_hexagon/test_autotvm.py b/tests/python/contrib/test_hexagon/test_autotvm.py index da60e20c3bf4..74ea66ab248a 100644 --- a/tests/python/contrib/test_hexagon/test_autotvm.py +++ b/tests/python/contrib/test_hexagon/test_autotvm.py @@ -88,10 +88,30 @@ def tune_tasks( for i, tsk in enumerate(reversed(tasks)): prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) - if tuner in ("xgb", "xgb-rank"): - tuner_obj = XGBTuner(tsk, loss_type="rank") + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random": diff --git a/tests/python/relay/collage/demo_collage_partitioner.py 
b/tests/python/relay/collage/demo_collage_partitioner.py index 2c9314516746..0b7c815a8806 100644 --- a/tests/python/relay/collage/demo_collage_partitioner.py +++ b/tests/python/relay/collage/demo_collage_partitioner.py @@ -180,7 +180,7 @@ def tune_autotvm_tasks(tasks, log_filename): continue logging.info(f"Using autotvm to tune {task.name}") - tuner_obj = tvm.autotvm.tuner.XGBTuner(task, loss_type="rank") + tuner_obj = tvm.autotvm.tuner.XGBTuner(task, loss_type="reg") if os.path.exists(tmp_log_filename): tuner_obj.load_history(tvm.autotvm.record.load_from_file(tmp_log_filename)) diff --git a/tests/python/unittest/test_autotvm_xgboost_model.py b/tests/python/unittest/test_autotvm_xgboost_model.py index 7fa3daede07e..b9f157247eae 100644 --- a/tests/python/unittest/test_autotvm_xgboost_model.py +++ b/tests/python/unittest/test_autotvm_xgboost_model.py @@ -32,10 +32,10 @@ def test_fit(): task, target = get_sample_task() records = get_sample_records(n=500) - base_model = XGBoostCostModel(task, feature_type="itervar", loss_type="rank") + base_model = XGBoostCostModel(task, feature_type="itervar", loss_type="reg") base_model.fit_log(records, plan_size=32) - upper_model = XGBoostCostModel(task, feature_type="itervar", loss_type="rank") + upper_model = XGBoostCostModel(task, feature_type="itervar", loss_type="reg") upper_model.load_basemodel(base_model) xs = np.arange(10) diff --git a/vta/scripts/tune_resnet.py b/vta/scripts/tune_resnet.py index d46f00b493ba..2c284f05a0de 100644 --- a/vta/scripts/tune_resnet.py +++ b/vta/scripts/tune_resnet.py @@ -183,8 +183,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") + elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + 
tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random": diff --git a/vta/tutorials/autotvm/tune_alu_vta.py b/vta/tutorials/autotvm/tune_alu_vta.py index a0c26ff9933e..8a4db13ac409 100644 --- a/vta/tutorials/autotvm/tune_alu_vta.py +++ b/vta/tutorials/autotvm/tune_alu_vta.py @@ -179,10 +179,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": - tuner_obj = XGBTuner(tsk, loss_type="rank") + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", 
feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random": diff --git a/vta/tutorials/autotvm/tune_relay_vta.py b/vta/tutorials/autotvm/tune_relay_vta.py index 2f505b2a86a6..dc5bd462276d 100644 --- a/vta/tutorials/autotvm/tune_relay_vta.py +++ b/vta/tutorials/autotvm/tune_relay_vta.py @@ -264,10 +264,30 @@ def tune_tasks( prefix = "[Task %2d/%2d] " % (i + 1, len(tasks)) # create tuner - if tuner == "xgb" or tuner == "xgb-rank": - tuner_obj = XGBTuner(tsk, loss_type="rank") + if tuner == "xgb": + tuner_obj = XGBTuner(tsk, loss_type="reg") elif tuner == "xgb_knob": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="knob") + elif tuner == "xgb_itervar": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="itervar") + elif tuner == "xgb_curve": + tuner_obj = XGBTuner(tsk, loss_type="reg", feature_type="curve") + elif tuner == "xgb_rank": + tuner_obj = XGBTuner(tsk, loss_type="rank") + elif tuner == "xgb_rank_knob": tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob") + elif tuner == "xgb_rank_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="itervar") + elif tuner == "xgb_rank_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="curve") + elif tuner == "xgb_rank_binary": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary") + elif tuner == "xgb_rank_binary_knob": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", 
feature_type="knob") + elif tuner == "xgb_rank_binary_itervar": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="itervar") + elif tuner == "xgb_rank_binary_curve": + tuner_obj = XGBTuner(tsk, loss_type="rank-binary", feature_type="curve") elif tuner == "ga": tuner_obj = GATuner(tsk, pop_size=50) elif tuner == "random":