@@ -623,10 +623,10 @@ def test_optimize_acqf_batch_limit(self) -> None:
             for ic_shape, expected_shape in [((2, 1, dim), 2), ((2, dim), 1)]:
                 with self.subTest(gen_candidates=gen_candidates):
+                    ics = torch.ones((ic_shape))
                     with self.assertWarnsRegex(
                         RuntimeWarning, "botorch will default to old behavior"
                     ):
-                        ics = torch.ones((ic_shape))
                         _candidates, acq_value_list = optimize_acqf(
                             acq_function=SinOneOverXAcqusitionFunction(),
                             bounds=torch.stack([-1 * torch.ones(dim), torch.ones(dim)]),
@@ -638,8 +638,7 @@ def test_optimize_acqf_batch_limit(self) -> None:
                             gen_candidates=gen_candidates,
                             batch_initial_conditions=ics,
                         )
-
-                self.assertEqual(acq_value_list.shape, (expected_shape,))
+                    self.assertEqual(acq_value_list.shape, (expected_shape,))
 
     def test_optimize_acqf_runs_given_batch_initial_conditions(self):
         num_restarts, raw_samples, dim = 1, 2, 3
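
For reference, here is a minimal standalone sketch (not part of the diff) of the unittest pattern the updated test follows: setup happens outside the assertWarnsRegex block so that only the call expected to warn runs inside it, while subTest keeps each parametrized case reported separately. The emit_warning helper below is hypothetical, not a botorch API.

import unittest
import warnings


def emit_warning(x):
    # Hypothetical stand-in for a call that emits a warning and returns a value.
    warnings.warn("botorch will default to old behavior", RuntimeWarning)
    return x * 2


class ExampleTest(unittest.TestCase):
    def test_cases(self):
        for value, expected in [(1, 2), (3, 6)]:
            with self.subTest(value=value):
                # Setup stays outside the warning assertion ...
                with self.assertWarnsRegex(RuntimeWarning, "old behavior"):
                    result = emit_warning(value)
                # ... and the follow-up check sits inside the subTest.
                self.assertEqual(result, expected)


if __name__ == "__main__":
    unittest.main()
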
@@ -915,27 +914,6 @@ def nlc1(x):
             torch.allclose(acq_value, torch.tensor([4], **tkwargs), atol=1e-3)
         )
 
-        # Make sure we return the initial solution if SLSQP fails to return
-        # a feasible point.
-        with mock.patch(
-            "botorch.generation.gen.minimize_with_timeout"
-        ) as mock_minimize:
-            # By setting "success" to True and "status" to 0, we prevent a
-            # warning that `minimize` failed, which isn't the behavior
-            # we're looking to test here.
-            mock_minimize.return_value = OptimizeResult(
-                x=np.array([4, 4, 4]), success=True, status=0
-            )
-            candidates, acq_value = optimize_acqf(
-                acq_function=mock_acq_function,
-                bounds=bounds,
-                q=1,
-                nonlinear_inequality_constraints=[(nlc1, True)],
-                batch_initial_conditions=batch_initial_conditions,
-                num_restarts=1,
-            )
-            self.assertAllClose(candidates, batch_initial_conditions[0, ...])
-
         # Constrain all variables to be >= 1. The global optimum is 2.45 and
         # is attained by some permutation of [1, 1, 2]
         def nlc2(x):
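
The block removed above relied on patching an optimizer call so it returns a canned result; success=True and status=0 mark the fake run as converged so no failure warning is emitted. A standalone sketch of that pattern, using scipy.optimize.minimize rather than botorch's internal minimize_with_timeout:

from unittest import mock

import numpy as np
import scipy.optimize

with mock.patch("scipy.optimize.minimize") as mock_minimize:
    # Return a fixed "successful" result instead of running a real optimization.
    mock_minimize.return_value = scipy.optimize.OptimizeResult(
        x=np.array([4.0, 4.0, 4.0]), success=True, status=0
    )
    res = scipy.optimize.minimize(lambda x: float((x**2).sum()), np.zeros(3))
    assert np.allclose(res.x, 4.0)  # the canned point is returned unchanged
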
@@ -1685,10 +1663,10 @@ def test_optimize_acqf_mixed_q2(self, mock_optimize_acqf):
         self.assertTrue(torch.equal(acq_value, expected_acq_value))
 
     def test_optimize_acqf_mixed_empty_ff(self):
+        mock_acq_function = MockAcquisitionFunction()
         with self.assertRaisesRegex(
             ValueError, expected_regex="fixed_features_list must be non-empty."
         ):
-            mock_acq_function = MockAcquisitionFunction()
             optimize_acqf_mixed(
                 acq_function=mock_acq_function,
                 q=1,
@@ -1715,9 +1693,9 @@ def test_optimize_acqf_mixed_return_best_only_q2(self):
         )
 
     def test_optimize_acqf_one_shot_large_q(self):
-        with self.assertRaises(ValueError):
-            mock_acq_function = MockOneShotAcquisitionFunction()
-            fixed_features_list = [{i: i * 0.1} for i in range(2)]
+        mock_acq_function = MockOneShotAcquisitionFunction()
+        fixed_features_list = [{i: i * 0.1} for i in range(2)]
+        with self.assertRaisesRegex(UnsupportedError, "OneShotAcquisitionFunction"):
             optimize_acqf_mixed(
                 acq_function=mock_acq_function,
                 q=2,
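
As a standalone illustration of the testing pattern the two optimize_acqf_mixed hunks above adopt, fixtures are built before the assertRaisesRegex block and the assertion names a specific exception type and message. FakeUnsupportedError and run_mixed below are hypothetical stand-ins, not botorch APIs.

import unittest


class FakeUnsupportedError(Exception):
    """Hypothetical stand-in for a library-specific error type."""


def run_mixed(fixed_features_list, one_shot=False):
    # Hypothetical function under test.
    if not fixed_features_list:
        raise ValueError("fixed_features_list must be non-empty.")
    if one_shot:
        raise FakeUnsupportedError("OneShotAcquisitionFunction is not supported")
    return fixed_features_list


class ExampleTest(unittest.TestCase):
    def test_empty_list(self):
        with self.assertRaisesRegex(ValueError, "must be non-empty"):
            run_mixed([])

    def test_one_shot(self):
        # Fixtures created outside the raises block ...
        features = [{i: i * 0.1} for i in range(2)]
        # ... so only the failing call runs inside it.
        with self.assertRaisesRegex(
            FakeUnsupportedError, "OneShotAcquisitionFunction"
        ):
            run_mixed(features, one_shot=True)


if __name__ == "__main__":
    unittest.main()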