The original import, `from tensorflow.keras.wrappers.scikit_learn import KerasClassifier`, raises a module error. I changed it to `from scikeras.wrappers import KerasClassifier`, which imports fine, but now the implementation throws errors I did not get before. Please have a look at the code snippets and the error below and help me solve this.
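For clarity, the import change was just this (assuming `scikeras` is installed, e.g. via `pip install scikeras`):

```python
# Old wrapper, removed from recent TensorFlow releases (raises a module error):
# from tensorflow.keras.wrappers.scikit_learn import KerasClassifier

# Replacement from the standalone scikeras package:
from scikeras.wrappers import KerasClassifier
```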
```python
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout

def create_model_v4(lr, batch_size):
    np.random.seed(1337)
    model = Sequential()
    model.add(Dense(256, activation='relu', input_dim=X_train.shape[1]))
    model.add(Dropout(0.3))
    #model.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))
    model.add(Dense(128, activation='relu'))
    model.add(Dropout(0.3))
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.2))
    #model.add(Dense(32, activation='relu', kernel_initializer='he_uniform'))
    #model.add(Dropout(0.3))
    model.add(Dense(32, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    # compile model
    optimizer = tf.keras.optimizers.Adam(learning_rate=lr)
    model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy'])
    return model
```
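For reference, the builder can be called on its own to check that the model compiles (a quick sanity check, assuming `X_train` is already defined):

```python
# Build one model directly, outside the search (the values here are arbitrary).
model = create_model_v4(lr=0.01, batch_size=32)
model.summary()
```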
The second part of the code is:
```python
from sklearn.model_selection import RandomizedSearchCV

keras_estimator = KerasClassifier(build_fn=create_model_v4, verbose=1)

# define the grid search parameters
param_random = {
    'batch_size': [32, 64, 128],
    'lr': [0.01, 0.1, 0.001],
}

kfold_splits = 3
random = RandomizedSearchCV(estimator=keras_estimator,
                            verbose=1,
                            cv=kfold_splits,
                            param_distributions=param_random,
                            n_jobs=-1)
```
The third and last part, which is the one currently throwing the error, is:
```python
random_result = random.fit(X_train, y_train, validation_split=0.2, verbose=1)

## Summarize results
print("Best: %f using %s" % (random_result.best_score_, random_result.best_params_))
means = random_result.cv_results_['mean_test_score']
stds = random_result.cv_results_['std_test_score']
params = random_result.cv_results_['params']
```
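The intent afterwards is the usual loop over these arrays, something like:

```python
# Print every candidate's mean/std score next to its parameter combination.
for mean, std, param in zip(means, stds, params):
    print("%f (%f) with: %r" % (mean, std, param))
```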
The error is:
```
Fitting 3 folds for each of 9 candidates, totalling 27 fits
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[111], line 2
      1 ''''''
----> 2 random_result = random.fit(X_train, y_train,validation_split=0.2,verbose=1)
      4 # Summarize results
      5 print("Best: %f using %s" % (random_result.best_score_, random_result.best_params_))

File ~\anaconda3\Env\Lib\site-packages\sklearn\base.py:1473, in _fit_context.<locals>.decorator.<locals>.wrapper(estimator, *args, **kwargs)
   1466     estimator._validate_params()
   1468 with config_context(
   1469     skip_parameter_validation=(
   1470         prefer_skip_nested_validation or global_skip_validation
   1471     )
   1472 ):
-> 1473     return fit_method(estimator, *args, **kwargs)

File ~\anaconda3\Env\Lib\site-packages\sklearn\model_selection\_search.py:968, in BaseSearchCV.fit(self, X, y, **params)
    962 results = self._format_results(
    963     all_candidate_params, n_splits, all_out, all_more_results
    964 )
    966 return results
--> 968 self._run_search(evaluate_candidates)
    970 # multimetric is determined here because in the case of a callable
    971 # self.scoring the return type is only known after calling
    972 first_test_score = all_out[0]["test_scores"]

File ~\anaconda3\Env\Lib\site-packages\sklearn\model_selection\_search.py:1930, in RandomizedSearchCV._run_search(self, evaluate_candidates)
   1928 def _run_search(self, evaluate_candidates):
   1929     """Search n_iter candidates from param_distributions"""
-> 1930     evaluate_candidates(
   1931         ParameterSampler(
   1932             self.param_distributions, self.n_iter, random_state=self.random_state
   1933         )
   1934     )

File ~\anaconda3\Env\Lib\site-packages\sklearn\model_selection\_search.py:914, in BaseSearchCV.fit.<locals>.evaluate_candidates(candidate_params, cv, more_results)
    906 if self.verbose > 0:
    907     print(
    908         "Fitting {0} folds for each of {1} candidates,"
    909         " totalling {2} fits".format(
    910             n_splits, n_candidates, n_candidates * n_splits
    911         )
    912     )
--> 914 out = parallel(
    915     delayed(_fit_and_score)(
    916         clone(base_estimator),
    917         X,
    918         y,
    919         train=train,
    920         test=test,
    921         parameters=parameters,
    922         split_progress=(split_idx, n_splits),
    923         candidate_progress=(cand_idx, n_candidates),
    924         **fit_and_score_kwargs,
    925     )
    926     for (cand_idx, parameters), (split_idx, (train, test)) in product(
    927         enumerate(candidate_params),
    928         enumerate(cv.split(X, y, **routed_params.splitter.split)),
    929     )
    930 )
    932 if len(out) < 1:
    933     raise ValueError(
    934         "No fits were performed. "
    935         "Was the CV iterator empty? "
    936         "Were there no candidates?"
    937     )

File ~\anaconda3\Env\Lib\site-packages\sklearn\utils\parallel.py:67, in Parallel.__call__(self, iterable)
     62 config = get_config()
     63 iterable_with_config = (
     64     (_with_config(delayed_func, config), args, kwargs)
     65     for delayed_func, args, kwargs in iterable
     66 )
---> 67 return super().__call__(iterable_with_config)

File ~\anaconda3\Env\Lib\site-packages\joblib\parallel.py:1098, in Parallel.__call__(self, iterable)
   1095     self._iterating = False
   1097 with self._backend.retrieval_context():
-> 1098     self.retrieve()
   1099 # Make sure that we get a last message telling us we are done
   1100 elapsed_time = time.time() - self._start_time

File ~\anaconda3\Env\Lib\site-packages\joblib\parallel.py:975, in Parallel.retrieve(self)
    973 try:
    974     if getattr(self._backend, 'supports_timeout', False):
--> 975         self._output.extend(job.get(timeout=self.timeout))
    976     else:
    977         self._output.extend(job.get())

File ~\anaconda3\Env\Lib\site-packages\joblib\_parallel_backends.py:567, in LokyBackend.wrap_future_result(future, timeout)
    564 """Wrapper for Future.result to implement the same behaviour as
    565 AsyncResults.get from multiprocessing."""
    566 try:
--> 567     return future.result(timeout=timeout)
    568 except CfTimeoutError as e:
    569     raise TimeoutError from e

File ~\anaconda3\Env\Lib\concurrent\futures\_base.py:456, in Future.result(self, timeout)
    454     raise CancelledError()
    455 elif self._state == FINISHED:
--> 456     return self.__get_result()
    457 else:
    458     raise TimeoutError()

File ~\anaconda3\Env\Lib\concurrent\futures\_base.py:401, in Future.__get_result(self)
    399 if self._exception:
    400     try:
--> 401         raise self._exception
    402     finally:
    403         # Break a reference cycle with the exception in self._exception
    404         self = None

ValueError: Invalid parameter lr for estimator KerasClassifier.
This issue can likely be resolved by setting this parameter in the KerasClassifier constructor:
`KerasClassifier(lr=0.01)`
Check the list of available parameters with `estimator.get_params().keys()`
```