Я пытаюсь применить глубокое обучение для создания регрессии (6 независимых переменных и 1 зависимая переменная)
Я пытаюсь использовать байесовскую оптимизацию, чтобы найти лучший параметр для регрессии ИНС (искусственная нейронная сеть), но получаю ошибка.
# Step 1: Import necessary libraries for Bayesian Optimization
import numpy as np
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense, Dropout, Input
from keras.optimizers import Adam, SGD
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from scikeras.wrappers import KerasRegressor
from sklearn.metrics import mean_absolute_error
from keras.callbacks import EarlyStopping
import warnings
from skopt import BayesSearchCV
from skopt.space import Real, Categorical, Integer
# Suppress warnings for cleaner output
warnings.filterwarnings('ignore')
# Step 2: Load the dataset
df = pd.read_excel('...')
# Define features (X) and target (y)
X = df.drop(columns=['Y']) # Features
y = df['Y'] # Target variable
# Step 3: Split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Step 4: Standardize the features and target variable
scaler_X = StandardScaler()
scaler_y = StandardScaler()
# Scale the features
X_train_scaled = scaler_X.fit_transform(X_train)
X_test_scaled = scaler_X.transform(X_test)
# Scale the target variable
y_train_scaled = scaler_y.fit_transform(y_train.values.reshape(-1, 1)).flatten()
y_test_scaled = scaler_y.transform(y_test.values.reshape(-1, 1)).flatten()
# Step 5: Define the model-building function
def make_regression_ann(Optimizer_trial='adam', hidden_layers=1, dropout_rate=0.2,
weight_init='he_normal', activation_function='relu',
learning_rate=0.001, momentum=0.9):
# Choose optimizer based on parameters
if Optimizer_trial == 'adam':
optimizer = Adam(learning_rate=learning_rate)
elif Optimizer_trial == 'sgd':
optimizer = SGD(learning_rate=learning_rate, momentum=momentum)
else:
raise ValueError(f"Unknown optimizer: {Optimizer_trial}")
model = Sequential()
# Input layer + first hidden layer
model.add(Input(shape=(X_train_scaled.shape[1],)))
model.add(Dense(units=5, kernel_initializer=weight_init, activation=activation_function))
model.add(Dropout(rate=dropout_rate)) # Apply dropout
# Additional hidden layers
for _ in range(hidden_layers - 1):
model.add(Dense(units=5, kernel_initializer=weight_init, activation=activation_function))
model.add(Dropout(rate=dropout_rate)) # Apply dropout to each hidden layer
# Output layer
model.add(Dense(1, kernel_initializer='normal')) # Single output variable
# Compile the model
model.compile(loss='mean_absolute_error', optimizer=optimizer)
return model
# Step 6: Define hyperparameter space for Bayesian Optimization
bayes_search_space = {
'batch_size': Integer(10, 50), # Integer range for batch size
'epochs': Integer(10, 50), # Integer range for epochs
'model__Optimizer_trial': Categorical(['adam', 'sgd']),
'model__hidden_layers': Integer(1, 3), # Number of hidden layers
'model__dropout_rate': Real(0.1, 0.5), # Dropout rate as a continuous variable
'model__weight_init': Categorical(['he_normal', 'glorot_uniform']),
'model__activation_function': Categorical(['relu', 'tanh']),
'model__learning_rate': Real(1e-4, 1e-2, prior='log-uniform'), # Learning rate in log scale
'model__momentum': Real(0.5, 0.9) # Momentum for SGD
}
# Step 7: Wrap the model for Bayesian Optimization
RegModel_Bayes = KerasRegressor(model=make_regression_ann, verbose=0)
# Step 8: Set up Bayesian Optimization
# Adding Early Stopping as a callback
early_stopping_bayes = EarlyStopping(monitor='loss', patience=3, verbose=1)
bayes_search = BayesSearchCV(
estimator=RegModel_Bayes,
search_spaces=bayes_search_space,
n_iter=2, # Number of iterations for Bayesian Optimization
scoring='neg_mean_absolute_error',
cv=5, # Reduce CV folds to speed up the process
n_jobs=-1, # Use all available CPU cores
verbose=0,
random_state=42
)
# Step 9: Fit the Bayesian Optimization model
try:
bayes_search.fit(X_train_scaled, y_train_scaled, callback=[early_stopping_bayes])
except Exception as e:
print("Bayesian Optimization failed due to memory or worker issues. Consider reducing the parameter space or using fewer iterations.")
print("Error:", e)
# Step 10: Print the best parameters
if bayes_search.best_params_:
print("Best Parameters (Bayesian Optimization):", bayes_search.best_params_)
# Step 11: Evaluate the best model
def evaluate_model(grid_search, X_test_scaled, y_test, scaler_y):
best_model = grid_search.best_estimator_ # Extract the best Keras model
predictions_scaled = best_model.predict(X_test_scaled) # Make predictions on scaled test data
predictions = scaler_y.inverse_transform(predictions_scaled.reshape(-1, 1)) # Inverse transform predictions
y_test_original = scaler_y.inverse_transform(y_test.reshape(-1, 1)) # Inverse transform y_test
mae = mean_absolute_error(y_test_original, predictions) # Calculate MAE
print("Mean Absolute Error (MAE) on Test Data:", mae)
return mae
# Step 12: Evaluate the best model
if bayes_search.best_params_:
evaluate_model(bayes_search, X_test_scaled, y_test_scaled, scaler_y)
Но я получаю эту ошибку, но не знаю, как ее исправить. Я попробовалchatgpt, но не могу
Bayesian Optimization failed due to memory or worker issues. Consider reducing the parameter space or using fewer iterations.
Error: callback should be either a callable or a list of callables.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
Cell In[3], line 111
108 print("Error:", e)
110 # Step 10: Print the best parameters
--> 111 if bayes_search.best_params_:
112 print("Best Parameters (Bayesian Optimization):", bayes_search.best_params_)
114 # Step 11: Evaluate the best model
AttributeError: 'BayesSearchCV' object has no attribute 'best_params_'
Я пытаюсь применить глубокое обучение для создания регрессии (6 независимых переменных и 1 зависимая переменная) Я пытаюсь использовать байесовскую оптимизацию, чтобы найти лучший параметр для регрессии ИНС (искусственная нейронная сеть), но получаю ошибка. [code]# Step 1: Import necessary libraries for Bayesian Optimization import numpy as np import pandas as pd from keras.models import Sequential from keras.layers import Dense, Dropout, Input from keras.optimizers import Adam, SGD from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler from scikeras.wrappers import KerasRegressor from sklearn.metrics import mean_absolute_error from keras.callbacks import EarlyStopping import warnings from skopt import BayesSearchCV from skopt.space import Real, Categorical, Integer
# Suppress warnings for cleaner output warnings.filterwarnings('ignore')
# Step 2: Load the dataset df = pd.read_excel('...')
# Define features (X) and target (y) X = df.drop(columns=['Y']) # Features y = df['Y'] # Target variable
# Step 3: Split the data into training and testing sets X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Step 4: Standardize the features and target variable scaler_X = StandardScaler() scaler_y = StandardScaler()
# Scale the features X_train_scaled = scaler_X.fit_transform(X_train) X_test_scaled = scaler_X.transform(X_test)
# Additional hidden layers for _ in range(hidden_layers - 1): model.add(Dense(units=5, kernel_initializer=weight_init, activation=activation_function)) model.add(Dropout(rate=dropout_rate)) # Apply dropout to each hidden layer
# Output layer model.add(Dense(1, kernel_initializer='normal')) # Single output variable
# Compile the model model.compile(loss='mean_absolute_error', optimizer=optimizer) return model
# Step 6: Define hyperparameter space for Bayesian Optimization bayes_search_space = { 'batch_size': Integer(10, 50), # Integer range for batch size 'epochs': Integer(10, 50), # Integer range for epochs 'model__Optimizer_trial': Categorical(['adam', 'sgd']), 'model__hidden_layers': Integer(1, 3), # Number of hidden layers 'model__dropout_rate': Real(0.1, 0.5), # Dropout rate as a continuous variable 'model__weight_init': Categorical(['he_normal', 'glorot_uniform']), 'model__activation_function': Categorical(['relu', 'tanh']), 'model__learning_rate': Real(1e-4, 1e-2, prior='log-uniform'), # Learning rate in log scale 'model__momentum': Real(0.5, 0.9) # Momentum for SGD }
# Step 7: Wrap the model for Bayesian Optimization RegModel_Bayes = KerasRegressor(model=make_regression_ann, verbose=0)
# Step 8: Set up Bayesian Optimization # Adding Early Stopping as a callback early_stopping_bayes = EarlyStopping(monitor='loss', patience=3, verbose=1)
bayes_search = BayesSearchCV( estimator=RegModel_Bayes, search_spaces=bayes_search_space, n_iter=2, # Number of iterations for Bayesian Optimization scoring='neg_mean_absolute_error', cv=5, # Reduce CV folds to speed up the process n_jobs=-1, # Use all available CPU cores verbose=0, random_state=42 )
# Step 9: Fit the Bayesian Optimization model try: bayes_search.fit(X_train_scaled, y_train_scaled, callback=[early_stopping_bayes]) except Exception as e: print("Bayesian Optimization failed due to memory or worker issues. Consider reducing the parameter space or using fewer iterations.") print("Error:", e)
# Step 10: Print the best parameters if bayes_search.best_params_: print("Best Parameters (Bayesian Optimization):", bayes_search.best_params_)
# Step 11: Evaluate the best model def evaluate_model(grid_search, X_test_scaled, y_test, scaler_y): best_model = grid_search.best_estimator_ # Extract the best Keras model predictions_scaled = best_model.predict(X_test_scaled) # Make predictions on scaled test data predictions = scaler_y.inverse_transform(predictions_scaled.reshape(-1, 1)) # Inverse transform predictions y_test_original = scaler_y.inverse_transform(y_test.reshape(-1, 1)) # Inverse transform y_test mae = mean_absolute_error(y_test_original, predictions) # Calculate MAE print("Mean Absolute Error (MAE) on Test Data:", mae) return mae
# Step 12: Evaluate the best model if bayes_search.best_params_: evaluate_model(bayes_search, X_test_scaled, y_test_scaled, scaler_y) [/code] Но я получаю эту ошибку, но не знаю, как ее исправить. Я попробовалchatgpt, но не могу [code]Bayesian Optimization failed due to memory or worker issues. Consider reducing the parameter space or using fewer iterations. Error: callback should be either a callable or a list of callables. --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) Cell In[3], line 111 108 print("Error:", e) 110 # Step 10: Print the best parameters --> 111 if bayes_search.best_params_: 112 print("Best Parameters (Bayesian Optimization):", bayes_search.best_params_) 114 # Step 11: Evaluate the best model
AttributeError: 'BayesSearchCV' object has no attribute 'best_params_' [/code] Как это решить?