@@ -164,7 +164,7 @@ def gaussian(n):
     return random.gauss(1, 1)


-@learn_with(AverageLearner1D, bounds=[-2, 2])
+@learn_with(AverageLearner1D, bounds=(-2, 2))
 def noisy_peak(
     seed_x,
     sigma: uniform(1.5, 2.5),
@@ -271,8 +271,8 @@ def test_uniform_sampling2D(learner_type, f, learner_kwargs):
     "learner_type, bounds",
     [
         (Learner1D, (-1, 1)),
-        (Learner2D, [(-1, 1), (-1, 1)]),
-        (LearnerND, [(-1, 1), (-1, 1), (-1, 1)]),
+        (Learner2D, ((-1, 1), (-1, 1))),
+        (LearnerND, ((-1, 1), (-1, 1), (-1, 1))),
     ],
 )
 def test_learner_accepts_lists(learner_type, bounds):
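The hunks above standardize on tuples for the bounds passed in the parametrization, while test_learner_accepts_lists keeps verifying that list input remains valid. A minimal sketch of the two call styles against the public constructors (illustrative only, not part of this diff):

    import adaptive

    # Tuple form, matching the updated parametrization
    learner = adaptive.Learner2D(lambda xy: sum(xy), bounds=((-1, 1), (-1, 1)))

    # List form, which the test continues to accept
    learner_from_list = adaptive.Learner2D(lambda xy: sum(xy), bounds=[(-1, 1), (-1, 1)])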
@@ -480,7 +480,9 @@ def test_learner_performance_is_invariant_under_scaling(
     yscale = 1000 * random.random()

     l_kwargs = dict(learner_kwargs)
-    l_kwargs["bounds"] = xscale * np.array(l_kwargs["bounds"])
+    bounds = xscale * np.array(l_kwargs["bounds"])
+    bounds = tuple(bounds.tolist())  # to satisfy typeguard tests
+    l_kwargs["bounds"] = bounds

     def scale_x(x):
         if isinstance(learner, AverageLearner1D):
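The last hunk scales the bounds with NumPy and then converts the result back to a tuple of plain Python floats, since an ndarray would not satisfy a tuple-typed bounds annotation checked at runtime by typeguard (the annotation itself is not shown in this diff). A standalone illustration of that conversion, with arbitrary values:

    import numpy as np

    bounds = (-1.0, 1.0)
    xscale = 3.0

    scaled = xscale * np.array(bounds)      # ndarray([-3., 3.]); not a tuple
    scaled_bounds = tuple(scaled.tolist())  # (-3.0, 3.0); plain floats, accepted by typeguard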