[D] Time series forecasting hyperparameter tuning
Claude wrote this for me, but I don't think you can train a model with lag features like this. I think you need some kind of recursive stuff in this part too:
print(f"n🔍 Optimizing LightGBM hyperparameters ({n_trials} trials)...") def objective(trial): params = { 'n_estimators': trial.suggest_int('n_estimators', 500, 3000), 'learning_rate': trial.suggest_float('learning_rate', 0.01, 0.1, log=True), 'num_leaves': trial.suggest_int('num_leaves', 20, 100), 'max_depth': trial.suggest_int('max_depth', 4, 12), 'min_child_samples': trial.suggest_int('min_child_samples', 10, 50), 'subsample': trial.suggest_float('subsample', 0.6, 1.0), 'colsample_bytree': trial.suggest_float('colsample_bytree', 0.6, 1.0), 'reg_alpha': trial.suggest_float('reg_alpha', 0.0, 2.0), 'reg_lambda': trial.suggest_float('reg_lambda', 0.0, 3.0), 'random_state': 42, 'verbose': -1 } model = LGBMRegressor(**params) # Use early stopping from sklearn.model_selection import train_test_split X_tr, X_val, y_tr, y_val = train_test_split(X_train, y_train, test_size=0.2, random_state=42) model.fit(X_tr, y_tr, eval_set=[(X_val, y_val)], callbacks=[optuna.integration.LightGBMPruningCallback(trial, 'l2')]) preds = model.predict(X_val) rmse = sqrt(mean_squared_error(y_val, preds)) return rmse study = optuna.create_study(direction='minimize', sampler=optuna.samplers.TPESampler(seed=42)) study.optimize(objective, n_trials=n_trials, show_progress_bar=True) print(f"✓ Best RMSE: {study.best_value:.4f}") print(f"✓ Best parameters: {study.best_params}") return study.best_params
submitted by /u/zxcvbnm9174