I am running a hyperparameter optimization (hyperopt) task on a Keras model with sklearn. I am trying to optimize a KerasClassifier with sklearn cross-validation; the code is as follows:

def create_model():
    model = Sequential()
    model.add(
        Dense(output_dim=params['units1'],
              input_dim=features_.shape[1],
              kernel_initializer="glorot_uniform"))
    model.add(Activation(params['activation']))
    model.add(Dropout(params['dropout1']))
    model.add(BatchNormalization())
    ...
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])

    return model


Now what I am trying to do is pass the Hyperopt parameters to the KerasClassifier like this:

def objective(params, n_folds=N_FOLDS):
    """Objective function for Hyperparameter Optimization"""

    # Keep track of evals
    global ITERATION

    ITERATION += 1

    clf = KerasClassifier(build_fn=create_model,**params)

    start = timer()

    # Perform n_folds cross validation
    cv_results = cross_val_score(clf,
                                 features_,
                                 labels,
                                 cv=5
                                 ).mean()

    run_time = timer() - start

    # Loss must be minimized
    loss = -cv_results

    # Dictionary with information for evaluation
    return {
        'loss': loss,
        'params': params,
        'iteration': ITERATION,
        'train_time': run_time,
        'status': STATUS_OK
    }


I defined the search space as:

space = {'units1': hp.choice('units1', [64, 128, 256, 512]),
         'units2': hp.choice('units2', [64, 128, 256, 512]),
         'dropout1': hp.choice('dropout1', [0.25, 0.5, 0.75]),
         'dropout2': hp.choice('dropout2', [0.25, 0.5, 0.75]),
         'batch_size': hp.choice('batch_size', [10, 20, 40, 60, 80, 100]),
         'nb_epochs': hp.choice('nb_epochs', [10, 50, 100]),
         'optimizer': opt_search_space,
         'activation': 'relu'}


And I run the optimization with:

best = fmin(fn = objective, space = space, algo = tpe.suggest,
            max_evals = MAX_EVALS, trials = bayes_trials, rstate = np.random.RandomState(50))


But it fails with this error:


  ValueError: activation is not a legal parameter


What is the correct way to do this?

Best Answer

Pass the hyperparameters as input arguments to the create_model function; you can then feed in the params dictionary directly. The error occurs because KerasClassifier checks every keyword argument against the signature of build_fn and the arguments of fit(), and since your create_model takes no parameters, activation is rejected. Also, change the key nb_epochs to epochs in the search space, since epochs is the name fit() expects. Read more about the other valid parameters here.
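To make the routing concrete, here is a minimal sketch (using the same parameter names as the example below): keys that match the build_fn signature are used to build the model, keys that match fit() are forwarded to training, and anything else triggers the "not a legal parameter" error.

# Keyword arguments matching create_model's signature build the model;
# epochs, batch_size and other fit() arguments go to model.fit().
clf = KerasClassifier(build_fn=create_model,
                      units1=64, activation='relu', dropout=0.25,  # -> create_model
                      epochs=2, batch_size=10)                     # -> model.fit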

Try the simplified example below.

import time

import numpy as np
import pandas as pd
from hyperopt import fmin, tpe, hp, Trials, STATUS_OK
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from tensorflow.keras import Sequential
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dense, Dropout


def timer():
    """Return the current wall-clock time in seconds."""
    return time.time()


X, y = make_classification(n_samples=1000, n_classes=2,
                           n_informative=4, weights=[0.7, 0.3],
                           random_state=0)


Define the Keras model:

def create_model(units1, activation, dropout):
    model = Sequential()
    model.add(Dense(units1,
                    input_dim=X.shape[1],
                    kernel_initializer="glorot_uniform",
                    activation=activation))
    model.add(Dropout(dropout))
    model.add(Dense(1, activation='sigmoid'))

    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])

    return model
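
As an optional sanity check, the build function should be callable on its own with the same keyword arguments that KerasClassifier will forward to it:

# Build one model by hand to verify the signature before wrapping it.
create_model(units1=12, activation='relu', dropout=0.25).summary()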


def objective(params, n_folds=2):
    """Objective function for Hyperparameter Optimization"""

    # Keep track of evals
    global ITERATION

    ITERATION += 1

    clf = KerasClassifier(build_fn=create_model, **params)

    start = timer()

    # Perform n_folds cross validation
    cv_results = cross_val_score(clf, X, y,
                                 cv=n_folds).mean()

    run_time = timer() - start

    # Loss must be minimized
    loss = -cv_results

    # Dictionary with information for evaluation
    return {
        'loss': loss,
        'params': params,
        'iteration': ITERATION,
        'train_time': run_time,
        'status': STATUS_OK
    }


space = {'units1': hp.choice('units1', [12, 64]),
         'dropout': hp.choice('dropout', [0.25, 0.5]),
         'batch_size': hp.choice('batch_size', [10, 20]),
         'epochs': hp.choice('epochs', [2, 3]),
         'activation': 'relu'}



# Evaluation counter used inside objective()
ITERATION = 0

bayes_trials = Trials()

best = fmin(fn = objective, space = space, algo = tpe.suggest,
            max_evals = 5, trials = bayes_trials, rstate = np.random.RandomState(50))
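
Note that for hp.choice parameters, best contains the chosen indices rather than the values themselves; hyperopt's space_eval maps them back to concrete hyperparameters:

from hyperopt import space_eval

# Convert the index-based result of fmin back into the actual values.
print(space_eval(space, best))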
