Parameters: { "xgb_gpu_hist", "xgb_grow_policy", "xgb_verbosity" } might not be used

I am fine tuning XGboost using Optuna, using the below code:-

class Hyperparameters_Optimization:
    def __init__(self, x_train, y_train, x_test, y_test) -> None:
        self.x_train = x_train
        self.y_train = y_train
        self.x_test = x_test
        self.y_test = y_test
        
    def Optimize_Xgboost_regressor(self, trial):
        logging.info("Optimize_Xgboost_regressor")
        param = {
            "xgb_verbosity": 0,
            "xgb_gpu_hist": 1,
            "max_depth": trial.suggest_int("max_depth", 1, 30),
            "learning_rate": trial.suggest_loguniform("learning_rate", 1e-7, 10.0),
            "n_estimators": trial.suggest_int("n_estimators", 1, 200),
            "xgb_grow_policy" : trial.suggest_categorical("grow_policy", ["depthwise", "lossguide"])
    
        }
        reg = xgb.XGBRegressor(**param)
        reg.fit(self.x_train, self.y_train)
        val_accuracy = reg.score(self.x_test, self.y_test)
        return val_accuracy
class ModelTraining:
    def __init__(self, x_train, y_train, x_test, y_test) -> None:
        self.x_train = x_train
        self.y_train = y_train
        self.x_test = x_test
        self.y_test = y_test
        
    def xgboost(self, fine_tuning=True):
        logging.info("Entered for training XGBoost model")
        try:
            if fine_tuning:
                hy_opt = Hyperparameters_Optimization(
                    self.x_train, self.y_train, self.x_test, self.y_test
                )
                study = optuna.create_study(direction="maximize")
                study.optimize(hy_opt.Optimize_Xgboost_regressor, n_trials=100)
                trial = study.best_trial
                n_estimators = trial.params["n_estimators"]
                learning_rate = trial.params["learning_rate"]
                max_depth = trial.params["max_depth"]
                reg = xgb.XGBRegressor(
                    n_estimators=n_estimators,
                    learning_rate=learning_rate,
                    max_depth=max_depth,
                )
                reg.fit(self.x_train, self.y_train)
                return reg

            else:
                model = xgb.XGBRegressor(
                    n_estimators=200, learning_rate=0.01, max_depth=20
                )
                model.fit(self.x_train, self.y_train)
                return model
        except Exception as e:
            logging.error("Error in training XGBoost model")
            logging.error(e)
            return None
model_train = ModelTraining(x_train, y_train , x_test , y_test) 
xgb_model = model_train.xgboost(fine_tuning = True)

I am getting the following warning as I want to train it on GPU:-

Parameters: { "xgb_gpu_hist", "xgb_grow_policy", "xgb_verbosity" } might not be used.

  This could be a false alarm, with some parameters getting used by language bindings but
  then being mistakenly passed down to XGBoost core, or some parameter actually being used
  but getting flagged wrongly here. Please open an issue if you find any such cases.

Hi, those are not the correct parameter names, xgb_ prefix is not needed. Please see the document for reference.

Also, could you please share where did you learn about the parameter names that you are using?