5
5
import numpy as np
6
6
import pandas as pd
7
7
8
- from .convex_term import ConvexTerm , Input , Var , Func , Grad
8
+ from .convex_term import ConvexTerm , Input , Var , Func , FuncWithGrad , Grad
9
9
from .utils import check_scalar , log_table_header , log_table_row
10
10
11
11
Start = list [tuple [mip .Var , float ]]
@@ -155,8 +155,8 @@ def add_linear_constr(self, constraint: mip.LinExpr, name: str = "") -> mip.Cons
155
155
def add_nonlinear_constr (
156
156
self ,
157
157
var : Var ,
158
- func : Func ,
159
- grad : Optional [Grad ] = None ,
158
+ func : Union [ Func , FuncWithGrad ] ,
159
+ grad : Optional [Union [ Grad , bool ] ] = None ,
160
160
name : str = "" ,
161
161
) -> ConvexTerm :
162
162
"""Add a nonlinear constraint to the model.
@@ -165,14 +165,16 @@ def add_nonlinear_constr(
165
165
var: mip.Var or iterable of mip.Var or mip.LinExprTensor
166
166
The variable(s) included in the term. This can be provided in the form of a single variable, an
167
167
iterable of multiple variables or a variable tensor.
168
- func: callable mapping float(s) or array to float
168
+ func: callable
169
169
A function for computing the term's value. This function should accept one argument for each
170
170
variable in `var`. If `var` is a variable tensor, then the function should accept a single array.
171
- grad: callable mapping float(s) or array to float or array , default=`None`
171
+ grad: callable or bool , default=`None`
172
172
A function for computing the term's gradient. This function should accept one argument for each
173
173
variable in `var`. If `var` is a variable tensor, then the function should accept a single array. If
174
- `None`, then the gradient is approximated numerically.
175
- using the central finite difference method.
174
+ `None`, then the gradient is approximated numerically using the central finite difference method. If
175
+ `grad` is instead a Boolean and is `True`, then `func` is assumed to return a tuple where the first
176
+ element is the function value and the second element is the gradient. This is useful when the gradient
177
+ is expensive to compute.
176
178
name: str, default=''
177
179
The name of the constraint.
178
180
@@ -192,8 +194,8 @@ def add_nonlinear_constr(
192
194
def add_objective_term (
193
195
self ,
194
196
var : Var ,
195
- func : Func ,
196
- grad : Optional [Grad ] = None ,
197
+ func : Union [ Func , FuncWithGrad ] ,
198
+ grad : Optional [Union [ Grad , bool ] ] = None ,
197
199
name : str = "" ,
198
200
) -> ConvexTerm :
199
201
"""Add an objective term to the model.
@@ -202,14 +204,16 @@ def add_objective_term(
202
204
var: mip.Var or iterable of mip.Var or mip.LinExprTensor
203
205
The variable(s) included in the term. This can be provided in the form of a single variable, an
204
206
iterable of multiple variables or a variable tensor.
205
- func: callable mapping float(s) or array to float
207
+ func: callable
206
208
A function for computing the term's value. This function should accept one argument for each
207
209
variable in `var`. If `var` is a variable tensor, then the function should accept a single array.
208
- grad: callable mapping float(s) or array to float or array , default=`None`
210
+ grad: callable or bool , default=`None`
209
211
A function for computing the term's gradient. This function should accept one argument for each
210
212
variable in `var`. If `var` is a variable tensor, then the function should accept a single array. If
211
- `None`, then the gradient is approximated numerically.
212
- using the central finite difference method.
213
+ `None`, then the gradient is approximated numerically using the central finite difference method. If
214
+ `grad` is instead a Boolean and is `True`, then `func` is assumed to return a tuple where the first
215
+ element is the function value and the second element is the gradient. This is useful when the gradient
216
+ is expensive to compute.
213
217
name: str, default=''
214
218
The name of the term.
215
219
@@ -365,7 +369,9 @@ def var_by_name(self, name: str) -> mip.Var:
365
369
"""Get a variable by name."""
366
370
return self ._model .var_by_name (name = name )
367
371
368
- def var_value (self , x : Union [Input , str ]) -> Union [float , np .ndarray ]:
372
+ def var_value (
373
+ self , x : Union [mip .Var , mip .LinExprTensor , str ]
374
+ ) -> Union [float , np .ndarray ]:
369
375
"""Get the value of one or more decision variables corresponding to the best solution.
370
376
371
377
Args:
0 commit comments