Skip to content

Commit

Permalink
Change the OneHotEncoder used (switch to ColumnOneHotEncoder); fix initial population generation
Browse files Browse the repository at this point in the history
  • Loading branch information
perib committed Feb 27, 2024
1 parent 3e1936f commit 4681389
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions tpot2/config/transformers.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from functools import partial
import numpy as np

from tpot2.builtin_modules import ZeroCount, OneHotEncoder
from tpot2.builtin_modules import ZeroCount, OneHotEncoder, ColumnOneHotEncoder
from sklearn.preprocessing import Binarizer
from sklearn.decomposition import FastICA
from sklearn.cluster import FeatureAgglomeration
Expand Down Expand Up @@ -99,5 +99,5 @@ def make_transformer_config_dictionary(random_state=None, n_features=10):
RobustScaler: {},
StandardScaler: {},
ZeroCount: params_tpot_builtins_ZeroCount,
OneHotEncoder: params_tpot_builtins_OneHotEncoder,
ColumnOneHotEncoder: params_tpot_builtins_OneHotEncoder,
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def estimator_graph_individual_generator(
starting_ops = []
if inner_config_dict is not None:
starting_ops.append(ind._mutate_insert_inner_node)
if leaf_config_dict is not None:
if leaf_config_dict is not None or inner_config_dict is not None:
starting_ops.append(ind._mutate_insert_leaf)
n_nodes -= 1

Expand Down

0 comments on commit 4681389

Please sign in to comment.