# Bind the train split to the short generic names (X, y) that the
# doc snippets below reference.  X_train / y_train are defined earlier
# in this file (outside this view) — TODO confirm against full file.
X = X_train
y = y_train

# [start:simplest_example]
# Minimal end-to-end use: a plain callable works as the experiment.
from hyperactive.opt.gfo import HillClimbing


def score(p):
    """Toy objective: maximize -x**2, i.e. find the x that minimizes x^2."""
    return -(p["x"] ** 2)  # Find x that minimizes x²


opt = HillClimbing({"x": range(-10, 11)}, experiment=score)
best = opt.solve()  # {"x": 0}
# [end:simplest_example]
25+
26+
1627# [start:simple_objective]
1728def objective (params ):
1829 x = params ["x" ]
@@ -35,6 +46,14 @@ def objective(params):
3546# [end:sklearn_experiment_intro]
3647
3748
# [start:sklearn_3_lines]
# A ready-made experiment: cross-validated sklearn model evaluation.
from hyperactive.experiment.integrations import SklearnCvExperiment
from sklearn.ensemble import GradientBoostingClassifier

# X and y are the training data bound earlier in this file.
experiment = SklearnCvExperiment(GradientBoostingClassifier(), X, y, cv=5)
# [end:sklearn_3_lines]


3857# [start:optimizer_imports]
3958from hyperactive .opt .gfo import (
4059 HillClimbing , # Local search
@@ -118,6 +137,67 @@ def my_objective(params):
118137# [end:warm_starting]
119138
120139
# [start:swap_hill_climbing]
# Optimizers are interchangeable: same search_space/experiment, local search.
from hyperactive.opt.gfo import HillClimbing

optimizer = HillClimbing(search_space, experiment=experiment)
best = optimizer.solve()
# [end:swap_hill_climbing]
146+
147+
# [start:swap_bayesian]
# Same interface, model-based (Bayesian) search instead of local search.
from hyperactive.opt.gfo import BayesianOptimizer

optimizer = BayesianOptimizer(search_space, experiment=experiment)
best = optimizer.solve()
# [end:swap_bayesian]
154+
155+
# [start:swap_genetic]
# Same interface, population-based (genetic) search.
from hyperactive.opt.gfo import GeneticAlgorithmOptimizer

optimizer = GeneticAlgorithmOptimizer(search_space, experiment=experiment)
best = optimizer.solve()
# [end:swap_genetic]
162+
163+
# [start:complete_example]
# Self-contained example covering the full workflow: data -> experiment
# -> search space -> optimizer -> best parameters.
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from hyperactive.experiment.integrations import SklearnCvExperiment
from hyperactive.opt.gfo import BayesianOptimizer

# 1. Load your data
X, y = load_iris(return_X_y=True)

# 2. Define the experiment (what to optimize)
experiment = SklearnCvExperiment(
    estimator=RandomForestClassifier(),
    X=X, y=y, cv=5,
)

# 3. Define the search space (where to search)
search_space = {
    "n_estimators": list(range(10, 200, 10)),
    "max_depth": [3, 5, 10, 20, None],
    "min_samples_split": [2, 5, 10],
}

# 4. Choose an optimizer (how to search)
optimizer = BayesianOptimizer(
    search_space=search_space,
    n_iter=50,
    experiment=experiment,
    random_state=42,  # fixed seed for reproducible doc output
)

# 5. Run and get the best parameters
best_params = optimizer.solve()
print(f"Best parameters: {best_params}")
# [end:complete_example]
199+
200+
if __name__ == "__main__":
    # Smoke check: reaching this line means every snippet above imported
    # and executed without raising.
    print("Introduction snippet file is importable!")
# (removed web-page extraction artifact: "0 commit comments" — not part of the source file)