ortools.math_opt.python.callback

Defines how to request a callback and the input and output of a callback.

  1# Copyright 2010-2024 Google LLC
  2# Licensed under the Apache License, Version 2.0 (the "License");
  3# you may not use this file except in compliance with the License.
  4# You may obtain a copy of the License at
  5#
  6#     http://www.apache.org/licenses/LICENSE-2.0
  7#
  8# Unless required by applicable law or agreed to in writing, software
  9# distributed under the License is distributed on an "AS IS" BASIS,
 10# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 11# See the License for the specific language governing permissions and
 12# limitations under the License.
 13
 14"""Defines how to request a callback and the input and output of a callback."""
 15import dataclasses
 16import datetime
 17import enum
 18import math
 19from typing import Dict, List, Mapping, Optional, Set, Union
 20
 21from ortools.math_opt import callback_pb2
 22from ortools.math_opt.python import model
 23from ortools.math_opt.python import sparse_containers
 24
 25
 26@enum.unique
 27class Event(enum.Enum):
 28    """The supported events during a solve for callbacks.
 29
 30    * UNSPECIFIED: The event is unknown (typically an internal error).
 31    * PRESOLVE: The solver is currently running presolve. Gurobi only.
 32    * SIMPLEX: The solver is currently running the simplex method. Gurobi only.
 33    * MIP: The solver is in the MIP loop (called periodically before starting a
 34        new node). Useful for early termination. Note that this event does not
 35        provide information on LP relaxations nor about new incumbent solutions.
 36        Gurobi only.
 37    * MIP_SOLUTION: Called every time a new MIP incumbent is found. Fully
 38        supported by Gurobi, partially supported by CP-SAT (you can observe new
 39        solutions, but not add lazy constraints).
 40    * MIP_NODE: Called inside a MIP node. Note that there is no guarantee that the
 41        callback function will be called on every node. That behavior is
 42        solver-dependent. Gurobi only.
 43
 44        Disabling cuts using SolveParameters may interfere with this event being
 45        called and/or adding cuts at this event; the behavior is solver-specific.
 46    * BARRIER: Called in each iterate of an interior point/barrier method. Gurobi
 47        only.
 48    """
 49
 50    UNSPECIFIED = callback_pb2.CALLBACK_EVENT_UNSPECIFIED
 51    PRESOLVE = callback_pb2.CALLBACK_EVENT_PRESOLVE
 52    SIMPLEX = callback_pb2.CALLBACK_EVENT_SIMPLEX
 53    MIP = callback_pb2.CALLBACK_EVENT_MIP
 54    MIP_SOLUTION = callback_pb2.CALLBACK_EVENT_MIP_SOLUTION
 55    MIP_NODE = callback_pb2.CALLBACK_EVENT_MIP_NODE
 56    BARRIER = callback_pb2.CALLBACK_EVENT_BARRIER
 57
 58
 59PresolveStats = callback_pb2.CallbackDataProto.PresolveStats
 60SimplexStats = callback_pb2.CallbackDataProto.SimplexStats
 61BarrierStats = callback_pb2.CallbackDataProto.BarrierStats
 62MipStats = callback_pb2.CallbackDataProto.MipStats
 63
 64
 65@dataclasses.dataclass
 66class CallbackData:
 67    """Input to the solve callback (produced by the solver).
 68
 69    Attributes:
 70      event: The current state of the solver when the callback is run. The event
 71        (partially) determines what data is available and what the user is allowed
 72        to return.
 73      solution: A solution to the primal optimization problem, if available. For
 74        Event.MIP_SOLUTION, solution is always present, integral, and feasible.
 75        For Event.MIP_NODE, solution contains the current LP-node
 76        relaxation. In some cases, no solution will be available (e.g. because LP
 77        was infeasible or the solve was imprecise). Empty for other events.
 78      messages: Logs generated by the underlying solver, as a list of strings
 79        without new lines (each string is a line). Only filled on Event.MESSAGE.
 80      runtime: The time since Solve() was invoked.
 81      presolve_stats: Filled for Event.PRESOLVE only.
 82      simplex_stats: Filled for Event.SIMPLEX only.
 83      barrier_stats: Filled for Event.BARRIER only.
 84      mip_stats: Filled for the events MIP, MIP_SOLUTION and MIP_NODE only.
 85    """
 86
 87    event: Event = Event.UNSPECIFIED
 88    solution: Optional[Dict[model.Variable, float]] = None
 89    messages: List[str] = dataclasses.field(default_factory=list)
 90    runtime: datetime.timedelta = datetime.timedelta()
 91    presolve_stats: PresolveStats = dataclasses.field(default_factory=PresolveStats)
 92    simplex_stats: SimplexStats = dataclasses.field(default_factory=SimplexStats)
 93    barrier_stats: BarrierStats = dataclasses.field(default_factory=BarrierStats)
 94    mip_stats: MipStats = dataclasses.field(default_factory=MipStats)
 95
 96
 97def parse_callback_data(
 98    cb_data: callback_pb2.CallbackDataProto, mod: model.Model
 99) -> CallbackData:
100    """Creates a CallbackData from an equivalent proto.
101
102    Args:
103      cb_data: A protocol buffer with the information the user needs for a
104        callback.
105      mod: The model being solved.
106
107    Returns:
108      An equivalent CallbackData.
109
110    Raises:
111      ValueError: if cb_data is invalid or inconsistent with mod, e.g. cb_data
112      refers to a variable id not in mod.
113    """
114    result = CallbackData()
115    result.event = Event(cb_data.event)
116    if cb_data.HasField("primal_solution_vector"):
117        primal_solution = cb_data.primal_solution_vector
118        result.solution = {
119            mod.get_variable(id): val
120            for (id, val) in zip(primal_solution.ids, primal_solution.values)
121        }
122    result.runtime = cb_data.runtime.ToTimedelta()
123    result.presolve_stats = cb_data.presolve_stats
124    result.simplex_stats = cb_data.simplex_stats
125    result.barrier_stats = cb_data.barrier_stats
126    result.mip_stats = cb_data.mip_stats
127    return result
128
129
130@dataclasses.dataclass
131class CallbackRegistration:
132    """Requests the events and input data and reports output types for a callback.
133
134    Note that it is an error to add a constraint in a callback without setting
135    add_cuts and/or add_lazy_constraints to true.
136
137    Attributes:
138      events: When the callback should be invoked, by default, never. If an
139        unsupported event for a solver/model combination is selected, an
140        exception is raised; see Event above for details.
141      mip_solution_filter: restricts the variable values returned in
142        CallbackData.solution (the callback argument) at each MIP_SOLUTION event.
143        By default, values are returned for all variables.
144      mip_node_filter: restricts the variable values returned in
145        CallbackData.solution (the callback argument) at each MIP_NODE event. By
146        default, values are returned for all variables.
147      add_cuts: The callback may add "user cuts" (linear constraints that
148        strengthen the LP without cutting off integer points) at MIP_NODE events.
149      add_lazy_constraints: The callback may add "lazy constraints" (linear
150        constraints that cut off integer solutions) at MIP_NODE or MIP_SOLUTION
151        events.
152    """
153
154    events: Set[Event] = dataclasses.field(default_factory=set)
155    mip_solution_filter: sparse_containers.VariableFilter = (
156        sparse_containers.VariableFilter()
157    )
158    mip_node_filter: sparse_containers.VariableFilter = (
159        sparse_containers.VariableFilter()
160    )
161    add_cuts: bool = False
162    add_lazy_constraints: bool = False
163
164    def to_proto(self) -> callback_pb2.CallbackRegistrationProto:
165        """Returns an equivalent proto to this CallbackRegistration."""
166        result = callback_pb2.CallbackRegistrationProto()
167        result.request_registration[:] = sorted([event.value for event in self.events])
168        result.mip_solution_filter.CopyFrom(self.mip_solution_filter.to_proto())
169        result.mip_node_filter.CopyFrom(self.mip_node_filter.to_proto())
170        result.add_cuts = self.add_cuts
171        result.add_lazy_constraints = self.add_lazy_constraints
172        return result
173
174
175@dataclasses.dataclass
176class GeneratedConstraint:
177    """A linear constraint to add inside a callback.
178
179    Models a constraint of the form:
180      lb <= sum_{i in I} a_i * x_i <= ub
181
182    Two types of generated linear constraints are supported based on is_lazy:
183      * The "lazy constraint" can remove integer points from the feasible
184        region and can be added at event Event.MIP_NODE or
185        Event.MIP_SOLUTION
186      * The "user cut" (on is_lazy=false) strengthens the LP without removing
187        integer points. It can only be added at Event.MIP_NODE.
188
189
190    Attributes:
191      terms: The variables and linear coefficients in the constraint, a_i and x_i
192        in the model above.
193      lower_bound: lb in the model above.
194      upper_bound: ub in the model above.
195      is_lazy: Indicates if the constraint should be interpreted as a "lazy
196        constraint" (cuts off integer solutions) or a "user cut" (strengthens the
197        LP relaxation without cutting off integer solutions).
198    """
199
200    terms: Mapping[model.Variable, float] = dataclasses.field(default_factory=dict)
201    lower_bound: float = -math.inf
202    upper_bound: float = math.inf
203    is_lazy: bool = False
204
205    def to_proto(
206        self,
207    ) -> callback_pb2.CallbackResultProto.GeneratedLinearConstraint:
208        """Returns an equivalent proto for the constraint."""
209        result = callback_pb2.CallbackResultProto.GeneratedLinearConstraint()
210        result.is_lazy = self.is_lazy
211        result.lower_bound = self.lower_bound
212        result.upper_bound = self.upper_bound
213        result.linear_expression.CopyFrom(
214            sparse_containers.to_sparse_double_vector_proto(self.terms)
215        )
216        return result
217
218
219@dataclasses.dataclass
220class CallbackResult:
221    """The value returned by a solve callback (produced by the user).
222
223    Attributes:
224      terminate: Stop the solve process and return early. Can be called from any
225        event.
226      generated_constraints: Constraints to add to the model. For details, see
227        GeneratedConstraint documentation.
228      suggested_solutions: A list of solutions (or partially defined solutions) to
229        suggest to the solver. Some solvers (e.g., Gurobi) will try to convert a
230        partial solution into a full solution by solving a MIP. Use only for
231        Event.MIP_NODE.
232    """
233
234    terminate: bool = False
235    generated_constraints: List[GeneratedConstraint] = dataclasses.field(
236        default_factory=list
237    )
238    suggested_solutions: List[Mapping[model.Variable, float]] = dataclasses.field(
239        default_factory=list
240    )
241
242    def add_generated_constraint(
243        self,
244        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
245        *,
246        lb: Optional[float] = None,
247        ub: Optional[float] = None,
248        expr: Optional[model.LinearTypes] = None,
249        is_lazy: bool,
250    ) -> None:
251        """Adds a linear constraint to the list of generated constraints.
252
253        The constraint can be of two exclusive types: a "lazy constraint" or a
254        "user cut". A "user cut" is a constraint that excludes the current LP
255        solution, but does not cut off any integer-feasible points that satisfy the
256        already added constraints (either in callbacks or through
257        Model.add_linear_constraint()). A "lazy constraint" is a constraint that
258        excludes such integer-feasible points and hence is needed for correctness of
259        the formulation.
260
261        The simplest way to specify the constraint is by passing a one-sided or
262        two-sided linear inequality as in:
263          * add_generated_constraint(x + y + 1.0 <= 2.0, is_lazy=True),
264          * add_generated_constraint(x + y >= 2.0, is_lazy=True), or
265          * add_generated_constraint((1.0 <= x + y) <= 2.0, is_lazy=True).
266
267        Note the extra parentheses for two-sided linear inequalities, which are
268        required due to some language limitations (see
269        https://peps.python.org/pep-0335/ and https://peps.python.org/pep-0535/).
270        If the parentheses are omitted, a TypeError will be raised explaining the
271        issue (if this error was not raised the first inequality would have been
272        silently ignored because of the noted language limitations).
273
274        The second way to specify the constraint is by setting lb, ub, and/or expr as
275        in:
276          * add_generated_constraint(expr=x + y + 1.0, ub=2.0, is_lazy=True),
277          * add_generated_constraint(expr=x + y, lb=2.0, is_lazy=True),
278          * add_generated_constraint(expr=x + y, lb=1.0, ub=2.0, is_lazy=True), or
279          * add_generated_constraint(lb=1.0, is_lazy=True).
280        Omitting lb is equivalent to setting it to -math.inf and omitting ub is
281        equivalent to setting it to math.inf.
282
283        These two alternatives are exclusive and a combined call like:
284          * add_generated_constraint(x + y <= 2.0, lb=1.0, is_lazy=True), or
285          * add_generated_constraint(x + y <= 2.0, ub=math.inf, is_lazy=True)
286        will raise a ValueError. A ValueError is also raised if expr's offset is
287        infinite.
288
289        Args:
290          bounded_expr: a linear inequality describing the constraint. Cannot be
291            specified together with lb, ub, or expr.
292          lb: The constraint's lower bound if bounded_expr is omitted (if both
293            bounded_expr and lb are omitted, the lower bound is -math.inf).
294          ub: The constraint's upper bound if bounded_expr is omitted (if both
295            bounded_expr and ub are omitted, the upper bound is math.inf).
296          expr: The constraint's linear expression if bounded_expr is omitted.
297          is_lazy: Whether the constraint is lazy or not.
298        """
299        normalized_inequality = model.as_normalized_linear_inequality(
300            bounded_expr, lb=lb, ub=ub, expr=expr
301        )
302        self.generated_constraints.append(
303            GeneratedConstraint(
304                lower_bound=normalized_inequality.lb,
305                terms=normalized_inequality.coefficients,
306                upper_bound=normalized_inequality.ub,
307                is_lazy=is_lazy,
308            )
309        )
310
311    def add_lazy_constraint(
312        self,
313        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
314        *,
315        lb: Optional[float] = None,
316        ub: Optional[float] = None,
317        expr: Optional[model.LinearTypes] = None,
318    ) -> None:
319        """Shortcut for add_generated_constraint(..., is_lazy=True)."""
320        self.add_generated_constraint(
321            bounded_expr, lb=lb, ub=ub, expr=expr, is_lazy=True
322        )
323
324    def add_user_cut(
325        self,
326        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
327        *,
328        lb: Optional[float] = None,
329        ub: Optional[float] = None,
330        expr: Optional[model.LinearTypes] = None,
331    ) -> None:
332        """Shortcut for add_generated_constraint(..., is_lazy=False)."""
333        self.add_generated_constraint(
334            bounded_expr, lb=lb, ub=ub, expr=expr, is_lazy=False
335        )
336
337    def to_proto(self) -> callback_pb2.CallbackResultProto:
338        """Returns a proto equivalent to this CallbackResult."""
339        result = callback_pb2.CallbackResultProto(terminate=self.terminate)
340        for generated_constraint in self.generated_constraints:
341            result.cuts.add().CopyFrom(generated_constraint.to_proto())
342        for suggested_solution in self.suggested_solutions:
343            result.suggested_solutions.add().CopyFrom(
344                sparse_containers.to_sparse_double_vector_proto(suggested_solution)
345            )
346        return result
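
A minimal usage sketch of the classes above. It assumes the model is built with ortools.math_opt.python.model and solved through the mathopt.solve() entry point; the callback_reg/cb parameter names of solve() are an assumption and may differ between OR-Tools versions, but CallbackRegistration, CallbackData and CallbackResult are exactly the classes defined in this module.

    # Sketch only: request MIP_SOLUTION events and cut off the incumbent
    # x = y = 1 with a lazy constraint.
    from ortools.math_opt.python import callback, mathopt, model

    mod = model.Model(name="example")
    x = mod.add_binary_variable(name="x")
    y = mod.add_binary_variable(name="y")
    mod.maximize(x + y)

    reg = callback.CallbackRegistration(
        events={callback.Event.MIP_SOLUTION},
        add_lazy_constraints=True,  # required before calling add_lazy_constraint
    )

    def cb(cb_data: callback.CallbackData) -> callback.CallbackResult:
        result = callback.CallbackResult()
        # At MIP_SOLUTION events, solution maps each model.Variable to its value.
        sol = cb_data.solution
        if sol is not None and sol[x] + sol[y] > 1.5:
            result.add_lazy_constraint(x + y <= 1.0)
        return result

    # Assumed entry point and parameter names; most callback events require Gurobi.
    res = mathopt.solve(mod, mathopt.SolverType.GUROBI, callback_reg=reg, cb=cb)
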
@enum.unique
class Event(enum.Enum):

The supported events during a solve for callbacks.

  • UNSPECIFIED: The event is unknown (typically an internal error).
  • PRESOLVE: The solver is currently running presolve. Gurobi only.
  • SIMPLEX: The solver is currently running the simplex method. Gurobi only.
  • MIP: The solver is in the MIP loop (called periodically before starting a new node). Useful for early termination. Note that this event does not provide information on LP relaxations nor about new incumbent solutions. Gurobi only.
  • MIP_SOLUTION: Called every time a new MIP incumbent is found. Fully supported by Gurobi, partially supported by CP-SAT (you can observe new solutions, but not add lazy constraints).
  • MIP_NODE: Called inside a MIP node. Note that there is no guarantee that the callback function will be called on every node. That behavior is solver-dependent. Gurobi only.

    Disabling cuts using SolveParameters may interfere with this event being called and/or adding cuts at this event; the behavior is solver-specific.

  • BARRIER: Called in each iterate of an interior point/barrier method. Gurobi only.
UNSPECIFIED = <Event.UNSPECIFIED: 0>
PRESOLVE = <Event.PRESOLVE: 1>
SIMPLEX = <Event.SIMPLEX: 2>
MIP = <Event.MIP: 3>
MIP_SOLUTION = <Event.MIP_SOLUTION: 4>
MIP_NODE = <Event.MIP_NODE: 5>
BARRIER = <Event.BARRIER: 6>
Inherited Members (enum.Enum): name, value
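
As a sketch of the MIP_NODE path described above (Gurobi only, and not guaranteed to fire at every node), a user cut can also be stated with the explicit expr/lb/ub keyword form instead of an inequality expression, and a suggested solution can be attached at the same event; the model and variables here are hypothetical:

    # Sketch only: request MIP_NODE events and add a user cut in expr/ub form.
    from ortools.math_opt.python import callback, model

    mod = model.Model()
    x = mod.add_binary_variable(name="x")
    y = mod.add_binary_variable(name="y")

    reg = callback.CallbackRegistration(
        events={callback.Event.MIP_NODE},
        add_cuts=True,  # required before calling add_user_cut
    )

    def cb(cb_data: callback.CallbackData) -> callback.CallbackResult:
        result = callback.CallbackResult()
        # At MIP_NODE events, solution holds the LP-node relaxation (may be None).
        sol = cb_data.solution
        if sol is not None and sol[x] + sol[y] > 1.0:
            # Equivalent to add_user_cut(x + y <= 1.0); an omitted lb defaults
            # to -inf. Illustrative only: a real user cut must not remove
            # integer-feasible points.
            result.add_user_cut(expr=x + y, ub=1.0)
        # Optionally suggest a (possibly partial) solution at MIP_NODE.
        result.suggested_solutions.append({x: 1.0, y: 0.0})
        return result
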
class PresolveStats(google.protobuf.message.Message):

Alias of callback_pb2.CallbackDataProto.PresolveStats: a protocol buffer message, generated by the protocol compiler, that carries presolve statistics. It subclasses google.protobuf.message.Message and provides the standard Message methods summarized below.

PresolveStats(**kwargs)

The standard protocol buffer Message methods (MergeFrom, Clear, SetInParent, IsInitialized, MergeFromString, SerializeToString, SerializePartialToString, ListFields, HasField, ClearField, WhichOneof, DiscardUnknownFields, ByteSize, FromString, FindInitializationErrors) behave as documented for google.protobuf.message.Message.

Fields:
  • removed_variables (field number 1)
  • removed_constraints (field number 2)
  • bound_changes (field number 3)
  • coefficient_changes (field number 4)

Inherited Members (google.protobuf.message.Message): CopyFrom, ParseFromString, HasExtension, ClearExtension, UnknownFields
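
For example, a PRESOLVE callback (Gurobi only) can read these fields from CallbackData.presolve_stats; a minimal sketch, assuming a CallbackRegistration like the ones above:

    # Sketch only: log presolve progress from a PRESOLVE callback.
    from ortools.math_opt.python import callback

    reg = callback.CallbackRegistration(events={callback.Event.PRESOLVE})

    def cb(cb_data: callback.CallbackData) -> callback.CallbackResult:
        stats = cb_data.presolve_stats  # a PresolveStats proto
        print(
            f"presolve removed {stats.removed_variables} variables "
            f"and {stats.removed_constraints} constraints"
        )
        return callback.CallbackResult()
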
class SimplexStats(google.protobuf.message.Message):

Alias of callback_pb2.CallbackDataProto.SimplexStats: a protocol buffer message, generated by the protocol compiler, that carries simplex statistics. It subclasses google.protobuf.message.Message and provides the same standard Message methods as PresolveStats above.

SimplexStats(**kwargs)
498  def init(self, **kwargs):
499    self._cached_byte_size = 0
500    self._cached_byte_size_dirty = len(kwargs) > 0
501    self._fields = {}
502    # Contains a mapping from oneof field descriptors to the descriptor
503    # of the currently set field in that oneof field.
504    self._oneofs = {}
505
506    # _unknown_fields is () when empty for efficiency, and will be turned into
507    # a list if fields are added.
508    self._unknown_fields = ()
509    self._is_present_in_parent = False
510    self._listener = message_listener_mod.NullMessageListener()
511    self._listener_for_children = _Listener(self)
512    for field_name, field_value in kwargs.items():
513      field = _GetFieldByName(message_descriptor, field_name)
514      if field is None:
515        raise TypeError('%s() got an unexpected keyword argument "%s"' %
516                        (message_descriptor.name, field_name))
517      if field_value is None:
518        # field=None is the same as no field at all.
519        continue
520      if field.label == _FieldDescriptor.LABEL_REPEATED:
521        field_copy = field._default_constructor(self)
522        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
523          if _IsMapField(field):
524            if _IsMessageMapField(field):
525              for key in field_value:
526                field_copy[key].MergeFrom(field_value[key])
527            else:
528              field_copy.update(field_value)
529          else:
530            for val in field_value:
531              if isinstance(val, dict):
532                field_copy.add(**val)
533              else:
534                field_copy.add().MergeFrom(val)
535        else:  # Scalar
536          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
537            field_value = [_GetIntegerEnumValue(field.enum_type, val)
538                           for val in field_value]
539          field_copy.extend(field_value)
540        self._fields[field] = field_copy
541      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
542        field_copy = field._default_constructor(self)
543        new_val = None
544        if isinstance(field_value, message_mod.Message):
545          new_val = field_value
546        elif isinstance(field_value, dict):
547          if field.message_type.full_name == _StructFullTypeName:
548            field_copy.Clear()
549            if len(field_value) == 1 and 'fields' in field_value:
550              try:
551                field_copy.update(field_value)
552              except:
553                # Fall back to init normal message field
554                field_copy.Clear()
555                new_val = field.message_type._concrete_class(**field_value)
556            else:
557              field_copy.update(field_value)
558          else:
559            new_val = field.message_type._concrete_class(**field_value)
560        elif hasattr(field_copy, '_internal_assign'):
561          field_copy._internal_assign(field_value)
562        else:
563          raise TypeError(
564              'Message field {0}.{1} must be initialized with a '
565              'dict or instance of same class, got {2}.'.format(
566                  message_descriptor.name,
567                  field_name,
568                  type(field_value).__name__,
569              )
570          )
571
572        if new_val:
573          try:
574            field_copy.MergeFrom(new_val)
575          except TypeError:
576            _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
577        self._fields[field] = field_copy
578      else:
579        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
580          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
581        try:
582          setattr(self, field_name, field_value)
583        except TypeError:
584          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
DESCRIPTOR = <google.protobuf.descriptor.Descriptor object>
def MergeFrom(self, msg):
1361  def MergeFrom(self, msg):
1362    if not isinstance(msg, cls):
1363      raise TypeError(
1364          'Parameter to MergeFrom() must be instance of same class: '
1365          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
1366                                   _FullyQualifiedClassName(msg.__class__)))
1367
1368    assert msg is not self
1369    self._Modified()
1370
1371    fields = self._fields
1372
1373    for field, value in msg._fields.items():
1374      if field.label == LABEL_REPEATED:
1375        field_value = fields.get(field)
1376        if field_value is None:
1377          # Construct a new object to represent this field.
1378          field_value = field._default_constructor(self)
1379          fields[field] = field_value
1380        field_value.MergeFrom(value)
1381      elif field.cpp_type == CPPTYPE_MESSAGE:
1382        if value._is_present_in_parent:
1383          field_value = fields.get(field)
1384          if field_value is None:
1385            # Construct a new object to represent this field.
1386            field_value = field._default_constructor(self)
1387            fields[field] = field_value
1388          field_value.MergeFrom(value)
1389      else:
1390        self._fields[field] = value
1391        if field.containing_oneof:
1392          self._UpdateOneofState(field)
1393
1394    if msg._unknown_fields:
1395      if not self._unknown_fields:
1396        self._unknown_fields = []
1397      self._unknown_fields.extend(msg._unknown_fields)

Merges the contents of the specified message into current message.

This method merges the contents of the specified message into the current message. Singular fields that are set in the specified message overwrite the corresponding fields in the current message. Repeated fields are appended. Singular sub-messages and groups are recursively merged.

Arguments:
  • other_msg (Message): A message to merge into the current message.
def Clear(self):
1420def _Clear(self):
1421  # Clear fields.
1422  self._fields = {}
1423  self._unknown_fields = ()
1424
1425  self._oneofs = {}
1426  self._Modified()

Clears all data that was set in the message.

def SetInParent(self):
1486  def Modified(self):
1487    """Sets the _cached_byte_size_dirty bit to true,
1488    and propagates this to our listener iff this was a state change.
1489    """
1490
1491    # Note:  Some callers check _cached_byte_size_dirty before calling
1492    #   _Modified() as an extra optimization.  So, if this method is ever
1493    #   changed such that it does stuff even when _cached_byte_size_dirty is
1494    #   already true, the callers need to be updated.
1495    if not self._cached_byte_size_dirty:
1496      self._cached_byte_size_dirty = True
1497      self._listener_for_children.dirty = True
1498      self._is_present_in_parent = True
1499      self._listener.Modified()

Sets the _cached_byte_size_dirty bit to true, and propagates this to our listener iff this was a state change.

def IsInitialized(self, errors=None):
1262  def IsInitialized(self, errors=None):
1263    """Checks if all required fields of a message are set.
1264
1265    Args:
1266      errors:  A list which, if provided, will be populated with the field
1267               paths of all missing required fields.
1268
1269    Returns:
1270      True iff the specified message has all required fields set.
1271    """
1272
1273    # Performance is critical so we avoid HasField() and ListFields().
1274
1275    for field in required_fields:
1276      if (field not in self._fields or
1277          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
1278           not self._fields[field]._is_present_in_parent)):
1279        if errors is not None:
1280          errors.extend(self.FindInitializationErrors())
1281        return False
1282
1283    for field, value in list(self._fields.items()):  # dict can change size!
1284      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1285        if field.label == _FieldDescriptor.LABEL_REPEATED:
1286          if (field.message_type._is_map_entry):
1287            continue
1288          for element in value:
1289            if not element.IsInitialized():
1290              if errors is not None:
1291                errors.extend(self.FindInitializationErrors())
1292              return False
1293        elif value._is_present_in_parent and not value.IsInitialized():
1294          if errors is not None:
1295            errors.extend(self.FindInitializationErrors())
1296          return False
1297
1298    return True

Checks if all required fields of a message are set.

Arguments:
  • errors: A list which, if provided, will be populated with the field paths of all missing required fields.
Returns:

True iff the specified message has all required fields set.

def MergeFromString(self, serialized):
1177  def MergeFromString(self, serialized):
1178    serialized = memoryview(serialized)
1179    length = len(serialized)
1180    try:
1181      if self._InternalParse(serialized, 0, length) != length:
1182        # The only reason _InternalParse would return early is if it
1183        # encountered an end-group tag.
1184        raise message_mod.DecodeError('Unexpected end-group tag.')
1185    except (IndexError, TypeError):
1186      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
1187      raise message_mod.DecodeError('Truncated message.')
1188    except struct.error as e:
1189      raise message_mod.DecodeError(e)
1190    return length   # Return this for legacy reasons.

Merges serialized protocol buffer data into this message.

When we find a field in serialized that is already present in this message:

  • If it's a "repeated" field, we append to the end of our list.
  • Else, if it's a scalar, we overwrite our field.
  • Else, (it's a nonrepeated composite), we recursively merge into the existing composite.
Arguments:
  • serialized (bytes): Any object that allows us to call memoryview(serialized) to access a string of bytes using the buffer interface.
Returns:

int: The number of bytes read from serialized. For non-group messages, this will always be len(serialized), but for messages which are actually groups, this will generally be less than len(serialized), since we must stop when we reach an END_GROUP tag. Note that if we do stop because of an END_GROUP tag, the number of bytes returned does not include the bytes for the END_GROUP tag information.

Raises:
  • DecodeError: if the input cannot be parsed.
def SerializeToString(self, **kwargs):
1130  def SerializeToString(self, **kwargs):
1131    # Check if the message has all of its required fields set.
1132    if not self.IsInitialized():
1133      raise message_mod.EncodeError(
1134          'Message %s is missing required fields: %s' % (
1135          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
1136    return self.SerializePartialToString(**kwargs)

Serializes the protocol message to a binary string.

Keyword Args:

deterministic (bool): If true, requests deterministic serialization of the protobuf, with predictable ordering of map keys.

Returns:

A binary string representation of the message if all of the required fields in the message are set (i.e. the message is initialized).

Raises:
def SerializePartialToString(self, **kwargs):
1143  def SerializePartialToString(self, **kwargs):
1144    out = BytesIO()
1145    self._InternalSerialize(out.write, **kwargs)
1146    return out.getvalue()

Serializes the protocol message to a binary string.

This method is similar to SerializeToString but doesn't check if the message is initialized.

Keyword Args:

deterministic (bool): If true, requests deterministic serialization of the protobuf, with predictable ordering of map keys.

Returns:

bytes: A serialized representation of the partial message.

def ListFields(self):
848  def ListFields(self):
849    all_fields = [item for item in self._fields.items() if _IsPresent(item)]
850    all_fields.sort(key = lambda item: item[0].number)
851    return all_fields

Returns a list of (FieldDescriptor, value) tuples for present fields.

A message field is non-empty if HasField() would return true. A singular primitive field is non-empty if HasField() would return true in proto2 or it is non zero in proto3. A repeated field is non-empty if it contains at least one element. The fields are ordered by field number.

Returns:

list[tuple(FieldDescriptor, value)]: field descriptors and values for all fields in the message which are not empty. The values vary by field type.

def HasField(self, field_name):
872  def HasField(self, field_name):
873    try:
874      field = hassable_fields[field_name]
875    except KeyError as exc:
876      raise ValueError('Protocol message %s has no non-repeated field "%s" '
877                       'nor has presence is not available for this field.' % (
878                           message_descriptor.full_name, field_name)) from exc
879
880    if isinstance(field, descriptor_mod.OneofDescriptor):
881      try:
882        return HasField(self, self._oneofs[field].name)
883      except KeyError:
884        return False
885    else:
886      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
887        value = self._fields.get(field)
888        return value is not None and value._is_present_in_parent
889      else:
890        return field in self._fields

Checks if a certain field is set for the message.

For a oneof group, checks if any field inside is set. Note that if the field_name is not defined in the message descriptor, ValueError will be raised.

Arguments:
  • field_name (str): The name of the field to check for presence.
Returns:

bool: Whether a value has been set for the named field.

Raises:
  • ValueError: if the field_name is not a member of this message.
def ClearField(self, field_name):
897  def ClearField(self, field_name):
898    try:
899      field = message_descriptor.fields_by_name[field_name]
900    except KeyError:
901      try:
902        field = message_descriptor.oneofs_by_name[field_name]
903        if field in self._oneofs:
904          field = self._oneofs[field]
905        else:
906          return
907      except KeyError:
908        raise ValueError('Protocol message %s has no "%s" field.' %
909                         (message_descriptor.name, field_name))
910
911    if field in self._fields:
912      # To match the C++ implementation, we need to invalidate iterators
913      # for map fields when ClearField() happens.
914      if hasattr(self._fields[field], 'InvalidateIterators'):
915        self._fields[field].InvalidateIterators()
916
917      # Note:  If the field is a sub-message, its listener will still point
918      #   at us.  That's fine, because the worst than can happen is that it
919      #   will call _Modified() and invalidate our byte size.  Big deal.
920      del self._fields[field]
921
922      if self._oneofs.get(field.containing_oneof, None) is field:
923        del self._oneofs[field.containing_oneof]
924
925    # Always call _Modified() -- even if nothing was changed, this is
926    # a mutating method, and thus calling it should cause the field to become
927    # present in the parent message.
928    self._Modified()

Clears the contents of a given field.

Inside a oneof group, clears the field set. If the name neither refers to a defined field or oneof group, ValueError is raised.

Arguments:
  • field_name (str): The name of the field to check for presence.
Raises:
  • ValueError: if the field_name is not a member of this message.
def WhichOneof(self, oneof_name):
1403  def WhichOneof(self, oneof_name):
1404    """Returns the name of the currently set field inside a oneof, or None."""
1405    try:
1406      field = message_descriptor.oneofs_by_name[oneof_name]
1407    except KeyError:
1408      raise ValueError(
1409          'Protocol message has no oneof "%s" field.' % oneof_name)
1410
1411    nested_field = self._oneofs.get(field, None)
1412    if nested_field is not None and self.HasField(nested_field.name):
1413      return nested_field.name
1414    else:
1415      return None

Returns the name of the currently set field inside a oneof, or None.

def DiscardUnknownFields(self):
1435def _DiscardUnknownFields(self):
1436  self._unknown_fields = []
1437  for field, value in self.ListFields():
1438    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1439      if _IsMapField(field):
1440        if _IsMessageMapField(field):
1441          for key in value:
1442            value[key].DiscardUnknownFields()
1443      elif field.label == _FieldDescriptor.LABEL_REPEATED:
1444        for sub_message in value:
1445          sub_message.DiscardUnknownFields()
1446      else:
1447        value.DiscardUnknownFields()

Clears all fields in the UnknownFieldSet.

This operation is recursive for nested messages.

def ByteSize(self):
1098  def ByteSize(self):
1099    if not self._cached_byte_size_dirty:
1100      return self._cached_byte_size
1101
1102    size = 0
1103    descriptor = self.DESCRIPTOR
1104    if descriptor._is_map_entry:
1105      # Fields of map entry should always be serialized.
1106      key_field = descriptor.fields_by_name['key']
1107      _MaybeAddEncoder(cls, key_field)
1108      size = key_field._sizer(self.key)
1109      value_field = descriptor.fields_by_name['value']
1110      _MaybeAddEncoder(cls, value_field)
1111      size += value_field._sizer(self.value)
1112    else:
1113      for field_descriptor, field_value in self.ListFields():
1114        _MaybeAddEncoder(cls, field_descriptor)
1115        size += field_descriptor._sizer(field_value)
1116      for tag_bytes, value_bytes in self._unknown_fields:
1117        size += len(tag_bytes) + len(value_bytes)
1118
1119    self._cached_byte_size = size
1120    self._cached_byte_size_dirty = False
1121    self._listener_for_children.dirty = False
1122    return size

Returns the serialized size of this message.

Recursively calls ByteSize() on all contained messages.

Returns:

int: The number of bytes required to serialize this message.

def FromString(s):
826  def FromString(s):
827    message = cls()
828    message.MergeFromString(s)
829    return message
ITERATION_COUNT_FIELD_NUMBER = 1
iteration_count
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for iteration_count.

OBJECTIVE_VALUE_FIELD_NUMBER = 2
objective_value
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for objective_value.

PRIMAL_INFEASIBILITY_FIELD_NUMBER = 3
primal_infeasibility
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for primal_infeasibility.

DUAL_INFEASIBILITY_FIELD_NUMBER = 4
dual_infeasibility
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for dual_infeasibility.

IS_PERTUBATED_FIELD_NUMBER = 5
is_pertubated
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for is_pertubated.
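Putting the fields above together, here is a hedged sketch of a helper that a solve callback could call at Event.SIMPLEX (the only event for which simplex_stats is populated, per CallbackData below); the function name is illustrative:

    from ortools.math_opt.python import callback

    def log_simplex_progress(cb_data: callback.CallbackData) -> None:
        # simplex_stats is only filled for Event.SIMPLEX.
        if cb_data.event != callback.Event.SIMPLEX:
            return
        stats = cb_data.simplex_stats
        print(
            f"iterations={stats.iteration_count} "
            f"objective={stats.objective_value} "
            f"primal_inf={stats.primal_infeasibility} "
            f"dual_inf={stats.dual_infeasibility}"
        )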

def FindInitializationErrors(self):
1302  def FindInitializationErrors(self):
1303    """Finds required fields which are not initialized.
1304
1305    Returns:
1306      A list of strings.  Each string is a path to an uninitialized field from
1307      the top-level message, e.g. "foo.bar[5].baz".
1308    """
1309
1310    errors = []  # simplify things
1311
1312    for field in required_fields:
1313      if not self.HasField(field.name):
1314        errors.append(field.name)
1315
1316    for field, value in self.ListFields():
1317      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1318        if field.is_extension:
1319          name = '(%s)' % field.full_name
1320        else:
1321          name = field.name
1322
1323        if _IsMapField(field):
1324          if _IsMessageMapField(field):
1325            for key in value:
1326              element = value[key]
1327              prefix = '%s[%s].' % (name, key)
1328              sub_errors = element.FindInitializationErrors()
1329              errors += [prefix + error for error in sub_errors]
1330          else:
1331            # ScalarMaps can't have any initialization errors.
1332            pass
1333        elif field.label == _FieldDescriptor.LABEL_REPEATED:
1334          for i in range(len(value)):
1335            element = value[i]
1336            prefix = '%s[%d].' % (name, i)
1337            sub_errors = element.FindInitializationErrors()
1338            errors += [prefix + error for error in sub_errors]
1339        else:
1340          prefix = name + '.'
1341          sub_errors = value.FindInitializationErrors()
1342          errors += [prefix + error for error in sub_errors]
1343
1344    return errors

Finds required fields which are not initialized.

Returns:

A list of strings. Each string is a path to an uninitialized field from the top-level message, e.g. "foo.bar[5].baz".

Inherited Members
google.protobuf.message.Message
CopyFrom
ParseFromString
HasExtension
ClearExtension
UnknownFields
class BarrierStats(google.protobuf.message.Message):

Abstract base class for protocol messages.

Protocol message classes are almost always generated by the protocol compiler. These generated types subclass Message and implement the methods shown below.

BarrierStats(**kwargs)
498  def init(self, **kwargs):
499    self._cached_byte_size = 0
500    self._cached_byte_size_dirty = len(kwargs) > 0
501    self._fields = {}
502    # Contains a mapping from oneof field descriptors to the descriptor
503    # of the currently set field in that oneof field.
504    self._oneofs = {}
505
506    # _unknown_fields is () when empty for efficiency, and will be turned into
507    # a list if fields are added.
508    self._unknown_fields = ()
509    self._is_present_in_parent = False
510    self._listener = message_listener_mod.NullMessageListener()
511    self._listener_for_children = _Listener(self)
512    for field_name, field_value in kwargs.items():
513      field = _GetFieldByName(message_descriptor, field_name)
514      if field is None:
515        raise TypeError('%s() got an unexpected keyword argument "%s"' %
516                        (message_descriptor.name, field_name))
517      if field_value is None:
518        # field=None is the same as no field at all.
519        continue
520      if field.label == _FieldDescriptor.LABEL_REPEATED:
521        field_copy = field._default_constructor(self)
522        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
523          if _IsMapField(field):
524            if _IsMessageMapField(field):
525              for key in field_value:
526                field_copy[key].MergeFrom(field_value[key])
527            else:
528              field_copy.update(field_value)
529          else:
530            for val in field_value:
531              if isinstance(val, dict):
532                field_copy.add(**val)
533              else:
534                field_copy.add().MergeFrom(val)
535        else:  # Scalar
536          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
537            field_value = [_GetIntegerEnumValue(field.enum_type, val)
538                           for val in field_value]
539          field_copy.extend(field_value)
540        self._fields[field] = field_copy
541      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
542        field_copy = field._default_constructor(self)
543        new_val = None
544        if isinstance(field_value, message_mod.Message):
545          new_val = field_value
546        elif isinstance(field_value, dict):
547          if field.message_type.full_name == _StructFullTypeName:
548            field_copy.Clear()
549            if len(field_value) == 1 and 'fields' in field_value:
550              try:
551                field_copy.update(field_value)
552              except:
553                # Fall back to init normal message field
554                field_copy.Clear()
555                new_val = field.message_type._concrete_class(**field_value)
556            else:
557              field_copy.update(field_value)
558          else:
559            new_val = field.message_type._concrete_class(**field_value)
560        elif hasattr(field_copy, '_internal_assign'):
561          field_copy._internal_assign(field_value)
562        else:
563          raise TypeError(
564              'Message field {0}.{1} must be initialized with a '
565              'dict or instance of same class, got {2}.'.format(
566                  message_descriptor.name,
567                  field_name,
568                  type(field_value).__name__,
569              )
570          )
571
572        if new_val:
573          try:
574            field_copy.MergeFrom(new_val)
575          except TypeError:
576            _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
577        self._fields[field] = field_copy
578      else:
579        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
580          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
581        try:
582          setattr(self, field_name, field_value)
583        except TypeError:
584          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
DESCRIPTOR = <google.protobuf.descriptor.Descriptor object>
def MergeFrom(self, msg):
1361  def MergeFrom(self, msg):
1362    if not isinstance(msg, cls):
1363      raise TypeError(
1364          'Parameter to MergeFrom() must be instance of same class: '
1365          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
1366                                   _FullyQualifiedClassName(msg.__class__)))
1367
1368    assert msg is not self
1369    self._Modified()
1370
1371    fields = self._fields
1372
1373    for field, value in msg._fields.items():
1374      if field.label == LABEL_REPEATED:
1375        field_value = fields.get(field)
1376        if field_value is None:
1377          # Construct a new object to represent this field.
1378          field_value = field._default_constructor(self)
1379          fields[field] = field_value
1380        field_value.MergeFrom(value)
1381      elif field.cpp_type == CPPTYPE_MESSAGE:
1382        if value._is_present_in_parent:
1383          field_value = fields.get(field)
1384          if field_value is None:
1385            # Construct a new object to represent this field.
1386            field_value = field._default_constructor(self)
1387            fields[field] = field_value
1388          field_value.MergeFrom(value)
1389      else:
1390        self._fields[field] = value
1391        if field.containing_oneof:
1392          self._UpdateOneofState(field)
1393
1394    if msg._unknown_fields:
1395      if not self._unknown_fields:
1396        self._unknown_fields = []
1397      self._unknown_fields.extend(msg._unknown_fields)

Merges the contents of the specified message into the current message.

This method merges the contents of the specified message into the current message. Singular fields that are set in the specified message overwrite the corresponding fields in the current message. Repeated fields are appended. Singular sub-messages and groups are recursively merged.

Arguments:
  • other_msg (Message): A message to merge into the current message.
def Clear(self):
1420def _Clear(self):
1421  # Clear fields.
1422  self._fields = {}
1423  self._unknown_fields = ()
1424
1425  self._oneofs = {}
1426  self._Modified()

Clears all data that was set in the message.

def SetInParent(self):
1486  def Modified(self):
1487    """Sets the _cached_byte_size_dirty bit to true,
1488    and propagates this to our listener iff this was a state change.
1489    """
1490
1491    # Note:  Some callers check _cached_byte_size_dirty before calling
1492    #   _Modified() as an extra optimization.  So, if this method is ever
1493    #   changed such that it does stuff even when _cached_byte_size_dirty is
1494    #   already true, the callers need to be updated.
1495    if not self._cached_byte_size_dirty:
1496      self._cached_byte_size_dirty = True
1497      self._listener_for_children.dirty = True
1498      self._is_present_in_parent = True
1499      self._listener.Modified()

Sets the _cached_byte_size_dirty bit to true, and propagates this to our listener iff this was a state change.

def IsInitialized(self, errors=None):
1262  def IsInitialized(self, errors=None):
1263    """Checks if all required fields of a message are set.
1264
1265    Args:
1266      errors:  A list which, if provided, will be populated with the field
1267               paths of all missing required fields.
1268
1269    Returns:
1270      True iff the specified message has all required fields set.
1271    """
1272
1273    # Performance is critical so we avoid HasField() and ListFields().
1274
1275    for field in required_fields:
1276      if (field not in self._fields or
1277          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
1278           not self._fields[field]._is_present_in_parent)):
1279        if errors is not None:
1280          errors.extend(self.FindInitializationErrors())
1281        return False
1282
1283    for field, value in list(self._fields.items()):  # dict can change size!
1284      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1285        if field.label == _FieldDescriptor.LABEL_REPEATED:
1286          if (field.message_type._is_map_entry):
1287            continue
1288          for element in value:
1289            if not element.IsInitialized():
1290              if errors is not None:
1291                errors.extend(self.FindInitializationErrors())
1292              return False
1293        elif value._is_present_in_parent and not value.IsInitialized():
1294          if errors is not None:
1295            errors.extend(self.FindInitializationErrors())
1296          return False
1297
1298    return True

Checks if all required fields of a message are set.

Arguments:
  • errors: A list which, if provided, will be populated with the field paths of all missing required fields.
Returns:

True iff the specified message has all required fields set.

def MergeFromString(self, serialized):
1177  def MergeFromString(self, serialized):
1178    serialized = memoryview(serialized)
1179    length = len(serialized)
1180    try:
1181      if self._InternalParse(serialized, 0, length) != length:
1182        # The only reason _InternalParse would return early is if it
1183        # encountered an end-group tag.
1184        raise message_mod.DecodeError('Unexpected end-group tag.')
1185    except (IndexError, TypeError):
1186      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
1187      raise message_mod.DecodeError('Truncated message.')
1188    except struct.error as e:
1189      raise message_mod.DecodeError(e)
1190    return length   # Return this for legacy reasons.

Merges serialized protocol buffer data into this message.

When we find a field in serialized that is already present in this message:

  • If it's a "repeated" field, we append to the end of our list.
  • Else, if it's a scalar, we overwrite our field.
  • Else, (it's a nonrepeated composite), we recursively merge into the existing composite.
Arguments:
  • serialized (bytes): Any object that allows us to call memoryview(serialized) to access a string of bytes using the buffer interface.
Returns:

int: The number of bytes read from serialized. For non-group messages, this will always be len(serialized), but for messages which are actually groups, this will generally be less than len(serialized), since we must stop when we reach an END_GROUP tag. Note that if we do stop because of an END_GROUP tag, the number of bytes returned does not include the bytes for the END_GROUP tag information.

Raises:
  • DecodeError: if the input cannot be parsed.
def SerializeToString(self, **kwargs):
1130  def SerializeToString(self, **kwargs):
1131    # Check if the message has all of its required fields set.
1132    if not self.IsInitialized():
1133      raise message_mod.EncodeError(
1134          'Message %s is missing required fields: %s' % (
1135          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
1136    return self.SerializePartialToString(**kwargs)

Serializes the protocol message to a binary string.

Keyword Args:

deterministic (bool): If true, requests deterministic serialization of the protobuf, with predictable ordering of map keys.

Returns:

A binary string representation of the message if all of the required fields in the message are set (i.e. the message is initialized).

Raises:
  • EncodeError: if the message isn't initialized (see IsInitialized()).
def SerializePartialToString(self, **kwargs):
1143  def SerializePartialToString(self, **kwargs):
1144    out = BytesIO()
1145    self._InternalSerialize(out.write, **kwargs)
1146    return out.getvalue()

Serializes the protocol message to a binary string.

This method is similar to SerializeToString but doesn't check if the message is initialized.

Keyword Args:

deterministic (bool): If true, requests deterministic serialization of the protobuf, with predictable ordering of map keys.

Returns:

bytes: A serialized representation of the partial message.

def ListFields(self):
848  def ListFields(self):
849    all_fields = [item for item in self._fields.items() if _IsPresent(item)]
850    all_fields.sort(key = lambda item: item[0].number)
851    return all_fields

Returns a list of (FieldDescriptor, value) tuples for present fields.

A message field is non-empty if HasField() would return true. A singular primitive field is non-empty if HasField() would return true in proto2 or it is non zero in proto3. A repeated field is non-empty if it contains at least one element. The fields are ordered by field number.

Returns:

list[tuple(FieldDescriptor, value)]: field descriptors and values for all fields in the message which are not empty. The values vary by field type.

def HasField(self, field_name):
872  def HasField(self, field_name):
873    try:
874      field = hassable_fields[field_name]
875    except KeyError as exc:
876      raise ValueError('Protocol message %s has no non-repeated field "%s" '
877                       'nor has presence is not available for this field.' % (
878                           message_descriptor.full_name, field_name)) from exc
879
880    if isinstance(field, descriptor_mod.OneofDescriptor):
881      try:
882        return HasField(self, self._oneofs[field].name)
883      except KeyError:
884        return False
885    else:
886      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
887        value = self._fields.get(field)
888        return value is not None and value._is_present_in_parent
889      else:
890        return field in self._fields

Checks if a certain field is set for the message.

For a oneof group, checks if any field inside is set. Note that if the field_name is not defined in the message descriptor, ValueError will be raised.

Arguments:
  • field_name (str): The name of the field to check for presence.
Returns:

bool: Whether a value has been set for the named field.

Raises:
  • ValueError: if the field_name is not a member of this message.
def ClearField(self, field_name):
897  def ClearField(self, field_name):
898    try:
899      field = message_descriptor.fields_by_name[field_name]
900    except KeyError:
901      try:
902        field = message_descriptor.oneofs_by_name[field_name]
903        if field in self._oneofs:
904          field = self._oneofs[field]
905        else:
906          return
907      except KeyError:
908        raise ValueError('Protocol message %s has no "%s" field.' %
909                         (message_descriptor.name, field_name))
910
911    if field in self._fields:
912      # To match the C++ implementation, we need to invalidate iterators
913      # for map fields when ClearField() happens.
914      if hasattr(self._fields[field], 'InvalidateIterators'):
915        self._fields[field].InvalidateIterators()
916
917      # Note:  If the field is a sub-message, its listener will still point
918      #   at us.  That's fine, because the worst than can happen is that it
919      #   will call _Modified() and invalidate our byte size.  Big deal.
920      del self._fields[field]
921
922      if self._oneofs.get(field.containing_oneof, None) is field:
923        del self._oneofs[field.containing_oneof]
924
925    # Always call _Modified() -- even if nothing was changed, this is
926    # a mutating method, and thus calling it should cause the field to become
927    # present in the parent message.
928    self._Modified()

Clears the contents of a given field.

Inside a oneof group, clears the field set. If the name refers to neither a defined field nor a oneof group, ValueError is raised.

Arguments:
  • field_name (str): The name of the field to clear.
Raises:
  • ValueError: if the field_name is not a member of this message.
def WhichOneof(self, oneof_name):
1403  def WhichOneof(self, oneof_name):
1404    """Returns the name of the currently set field inside a oneof, or None."""
1405    try:
1406      field = message_descriptor.oneofs_by_name[oneof_name]
1407    except KeyError:
1408      raise ValueError(
1409          'Protocol message has no oneof "%s" field.' % oneof_name)
1410
1411    nested_field = self._oneofs.get(field, None)
1412    if nested_field is not None and self.HasField(nested_field.name):
1413      return nested_field.name
1414    else:
1415      return None

Returns the name of the currently set field inside a oneof, or None.

def DiscardUnknownFields(self):
1435def _DiscardUnknownFields(self):
1436  self._unknown_fields = []
1437  for field, value in self.ListFields():
1438    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1439      if _IsMapField(field):
1440        if _IsMessageMapField(field):
1441          for key in value:
1442            value[key].DiscardUnknownFields()
1443      elif field.label == _FieldDescriptor.LABEL_REPEATED:
1444        for sub_message in value:
1445          sub_message.DiscardUnknownFields()
1446      else:
1447        value.DiscardUnknownFields()

Clears all fields in the UnknownFieldSet.

This operation is recursive for nested messages.

def ByteSize(self):
1098  def ByteSize(self):
1099    if not self._cached_byte_size_dirty:
1100      return self._cached_byte_size
1101
1102    size = 0
1103    descriptor = self.DESCRIPTOR
1104    if descriptor._is_map_entry:
1105      # Fields of map entry should always be serialized.
1106      key_field = descriptor.fields_by_name['key']
1107      _MaybeAddEncoder(cls, key_field)
1108      size = key_field._sizer(self.key)
1109      value_field = descriptor.fields_by_name['value']
1110      _MaybeAddEncoder(cls, value_field)
1111      size += value_field._sizer(self.value)
1112    else:
1113      for field_descriptor, field_value in self.ListFields():
1114        _MaybeAddEncoder(cls, field_descriptor)
1115        size += field_descriptor._sizer(field_value)
1116      for tag_bytes, value_bytes in self._unknown_fields:
1117        size += len(tag_bytes) + len(value_bytes)
1118
1119    self._cached_byte_size = size
1120    self._cached_byte_size_dirty = False
1121    self._listener_for_children.dirty = False
1122    return size

Returns the serialized size of this message.

Recursively calls ByteSize() on all contained messages.

Returns:

int: The number of bytes required to serialize this message.

def FromString(s):
826  def FromString(s):
827    message = cls()
828    message.MergeFromString(s)
829    return message
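A minimal round-trip sketch tying FromString to SerializeToString above, using the BarrierStats field names documented on this page:

    from ortools.math_opt import callback_pb2

    BarrierStats = callback_pb2.CallbackDataProto.BarrierStats

    stats = BarrierStats(iteration_count=12, primal_objective=3.5, dual_objective=3.4)
    payload = stats.SerializeToString()          # bytes
    restored = BarrierStats.FromString(payload)  # parse back into a new message
    assert restored.primal_objective == 3.5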
ITERATION_COUNT_FIELD_NUMBER = 1
iteration_count
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for iteration_count.

PRIMAL_OBJECTIVE_FIELD_NUMBER = 2
primal_objective
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for primal_objective.

DUAL_OBJECTIVE_FIELD_NUMBER = 3
dual_objective
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for dual_objective.

COMPLEMENTARITY_FIELD_NUMBER = 4
complementarity
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for complementarity.

PRIMAL_INFEASIBILITY_FIELD_NUMBER = 5
primal_infeasibility
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for primal_infeasibility.

DUAL_INFEASIBILITY_FIELD_NUMBER = 6
dual_infeasibility
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for dual_infeasibility.
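As a small hedged example of how these statistics might be consumed (the helper name is illustrative, not part of the API), the primal and dual objectives reported at a BARRIER event give a convergence gap:

    from ortools.math_opt.python import callback

    def barrier_gap(cb_data: callback.CallbackData) -> float:
        """Absolute primal-dual objective gap at an Event.BARRIER callback."""
        stats = cb_data.barrier_stats  # filled for Event.BARRIER only
        return abs(stats.primal_objective - stats.dual_objective)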

def FindInitializationErrors(self):
1302  def FindInitializationErrors(self):
1303    """Finds required fields which are not initialized.
1304
1305    Returns:
1306      A list of strings.  Each string is a path to an uninitialized field from
1307      the top-level message, e.g. "foo.bar[5].baz".
1308    """
1309
1310    errors = []  # simplify things
1311
1312    for field in required_fields:
1313      if not self.HasField(field.name):
1314        errors.append(field.name)
1315
1316    for field, value in self.ListFields():
1317      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1318        if field.is_extension:
1319          name = '(%s)' % field.full_name
1320        else:
1321          name = field.name
1322
1323        if _IsMapField(field):
1324          if _IsMessageMapField(field):
1325            for key in value:
1326              element = value[key]
1327              prefix = '%s[%s].' % (name, key)
1328              sub_errors = element.FindInitializationErrors()
1329              errors += [prefix + error for error in sub_errors]
1330          else:
1331            # ScalarMaps can't have any initialization errors.
1332            pass
1333        elif field.label == _FieldDescriptor.LABEL_REPEATED:
1334          for i in range(len(value)):
1335            element = value[i]
1336            prefix = '%s[%d].' % (name, i)
1337            sub_errors = element.FindInitializationErrors()
1338            errors += [prefix + error for error in sub_errors]
1339        else:
1340          prefix = name + '.'
1341          sub_errors = value.FindInitializationErrors()
1342          errors += [prefix + error for error in sub_errors]
1343
1344    return errors

Finds required fields which are not initialized.

Returns:

A list of strings. Each string is a path to an uninitialized field from the top-level message, e.g. "foo.bar[5].baz".

Inherited Members
google.protobuf.message.Message
CopyFrom
ParseFromString
HasExtension
ClearExtension
UnknownFields
class MipStats(google.protobuf.message.Message):

Abstract base class for protocol messages.

Protocol message classes are almost always generated by the protocol compiler. These generated types subclass Message and implement the methods shown below.

MipStats(**kwargs)
498  def init(self, **kwargs):
499    self._cached_byte_size = 0
500    self._cached_byte_size_dirty = len(kwargs) > 0
501    self._fields = {}
502    # Contains a mapping from oneof field descriptors to the descriptor
503    # of the currently set field in that oneof field.
504    self._oneofs = {}
505
506    # _unknown_fields is () when empty for efficiency, and will be turned into
507    # a list if fields are added.
508    self._unknown_fields = ()
509    self._is_present_in_parent = False
510    self._listener = message_listener_mod.NullMessageListener()
511    self._listener_for_children = _Listener(self)
512    for field_name, field_value in kwargs.items():
513      field = _GetFieldByName(message_descriptor, field_name)
514      if field is None:
515        raise TypeError('%s() got an unexpected keyword argument "%s"' %
516                        (message_descriptor.name, field_name))
517      if field_value is None:
518        # field=None is the same as no field at all.
519        continue
520      if field.label == _FieldDescriptor.LABEL_REPEATED:
521        field_copy = field._default_constructor(self)
522        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
523          if _IsMapField(field):
524            if _IsMessageMapField(field):
525              for key in field_value:
526                field_copy[key].MergeFrom(field_value[key])
527            else:
528              field_copy.update(field_value)
529          else:
530            for val in field_value:
531              if isinstance(val, dict):
532                field_copy.add(**val)
533              else:
534                field_copy.add().MergeFrom(val)
535        else:  # Scalar
536          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
537            field_value = [_GetIntegerEnumValue(field.enum_type, val)
538                           for val in field_value]
539          field_copy.extend(field_value)
540        self._fields[field] = field_copy
541      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
542        field_copy = field._default_constructor(self)
543        new_val = None
544        if isinstance(field_value, message_mod.Message):
545          new_val = field_value
546        elif isinstance(field_value, dict):
547          if field.message_type.full_name == _StructFullTypeName:
548            field_copy.Clear()
549            if len(field_value) == 1 and 'fields' in field_value:
550              try:
551                field_copy.update(field_value)
552              except:
553                # Fall back to init normal message field
554                field_copy.Clear()
555                new_val = field.message_type._concrete_class(**field_value)
556            else:
557              field_copy.update(field_value)
558          else:
559            new_val = field.message_type._concrete_class(**field_value)
560        elif hasattr(field_copy, '_internal_assign'):
561          field_copy._internal_assign(field_value)
562        else:
563          raise TypeError(
564              'Message field {0}.{1} must be initialized with a '
565              'dict or instance of same class, got {2}.'.format(
566                  message_descriptor.name,
567                  field_name,
568                  type(field_value).__name__,
569              )
570          )
571
572        if new_val:
573          try:
574            field_copy.MergeFrom(new_val)
575          except TypeError:
576            _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
577        self._fields[field] = field_copy
578      else:
579        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
580          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
581        try:
582          setattr(self, field_name, field_value)
583        except TypeError:
584          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
DESCRIPTOR = <google.protobuf.descriptor.Descriptor object>
def MergeFrom(self, msg):
1361  def MergeFrom(self, msg):
1362    if not isinstance(msg, cls):
1363      raise TypeError(
1364          'Parameter to MergeFrom() must be instance of same class: '
1365          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
1366                                   _FullyQualifiedClassName(msg.__class__)))
1367
1368    assert msg is not self
1369    self._Modified()
1370
1371    fields = self._fields
1372
1373    for field, value in msg._fields.items():
1374      if field.label == LABEL_REPEATED:
1375        field_value = fields.get(field)
1376        if field_value is None:
1377          # Construct a new object to represent this field.
1378          field_value = field._default_constructor(self)
1379          fields[field] = field_value
1380        field_value.MergeFrom(value)
1381      elif field.cpp_type == CPPTYPE_MESSAGE:
1382        if value._is_present_in_parent:
1383          field_value = fields.get(field)
1384          if field_value is None:
1385            # Construct a new object to represent this field.
1386            field_value = field._default_constructor(self)
1387            fields[field] = field_value
1388          field_value.MergeFrom(value)
1389      else:
1390        self._fields[field] = value
1391        if field.containing_oneof:
1392          self._UpdateOneofState(field)
1393
1394    if msg._unknown_fields:
1395      if not self._unknown_fields:
1396        self._unknown_fields = []
1397      self._unknown_fields.extend(msg._unknown_fields)

Merges the contents of the specified message into the current message.

This method merges the contents of the specified message into the current message. Singular fields that are set in the specified message overwrite the corresponding fields in the current message. Repeated fields are appended. Singular sub-messages and groups are recursively merged.

Arguments:
  • other_msg (Message): A message to merge into the current message.
def Clear(self):
1420def _Clear(self):
1421  # Clear fields.
1422  self._fields = {}
1423  self._unknown_fields = ()
1424
1425  self._oneofs = {}
1426  self._Modified()

Clears all data that was set in the message.

def SetInParent(self):
1486  def Modified(self):
1487    """Sets the _cached_byte_size_dirty bit to true,
1488    and propagates this to our listener iff this was a state change.
1489    """
1490
1491    # Note:  Some callers check _cached_byte_size_dirty before calling
1492    #   _Modified() as an extra optimization.  So, if this method is ever
1493    #   changed such that it does stuff even when _cached_byte_size_dirty is
1494    #   already true, the callers need to be updated.
1495    if not self._cached_byte_size_dirty:
1496      self._cached_byte_size_dirty = True
1497      self._listener_for_children.dirty = True
1498      self._is_present_in_parent = True
1499      self._listener.Modified()

Sets the _cached_byte_size_dirty bit to true, and propagates this to our listener iff this was a state change.

def IsInitialized(self, errors=None):
1262  def IsInitialized(self, errors=None):
1263    """Checks if all required fields of a message are set.
1264
1265    Args:
1266      errors:  A list which, if provided, will be populated with the field
1267               paths of all missing required fields.
1268
1269    Returns:
1270      True iff the specified message has all required fields set.
1271    """
1272
1273    # Performance is critical so we avoid HasField() and ListFields().
1274
1275    for field in required_fields:
1276      if (field not in self._fields or
1277          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
1278           not self._fields[field]._is_present_in_parent)):
1279        if errors is not None:
1280          errors.extend(self.FindInitializationErrors())
1281        return False
1282
1283    for field, value in list(self._fields.items()):  # dict can change size!
1284      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1285        if field.label == _FieldDescriptor.LABEL_REPEATED:
1286          if (field.message_type._is_map_entry):
1287            continue
1288          for element in value:
1289            if not element.IsInitialized():
1290              if errors is not None:
1291                errors.extend(self.FindInitializationErrors())
1292              return False
1293        elif value._is_present_in_parent and not value.IsInitialized():
1294          if errors is not None:
1295            errors.extend(self.FindInitializationErrors())
1296          return False
1297
1298    return True

Checks if all required fields of a message are set.

Arguments:
  • errors: A list which, if provided, will be populated with the field paths of all missing required fields.
Returns:

True iff the specified message has all required fields set.

def MergeFromString(self, serialized):
1177  def MergeFromString(self, serialized):
1178    serialized = memoryview(serialized)
1179    length = len(serialized)
1180    try:
1181      if self._InternalParse(serialized, 0, length) != length:
1182        # The only reason _InternalParse would return early is if it
1183        # encountered an end-group tag.
1184        raise message_mod.DecodeError('Unexpected end-group tag.')
1185    except (IndexError, TypeError):
1186      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
1187      raise message_mod.DecodeError('Truncated message.')
1188    except struct.error as e:
1189      raise message_mod.DecodeError(e)
1190    return length   # Return this for legacy reasons.

Merges serialized protocol buffer data into this message.

When we find a field in serialized that is already present in this message:

  • If it's a "repeated" field, we append to the end of our list.
  • Else, if it's a scalar, we overwrite our field.
  • Else, (it's a nonrepeated composite), we recursively merge into the existing composite.
Arguments:
  • serialized (bytes): Any object that allows us to call memoryview(serialized) to access a string of bytes using the buffer interface.
Returns:

int: The number of bytes read from serialized. For non-group messages, this will always be len(serialized), but for messages which are actually groups, this will generally be less than len(serialized), since we must stop when we reach an END_GROUP tag. Note that if we do stop because of an END_GROUP tag, the number of bytes returned does not include the bytes for the END_GROUP tag information.

Raises:
  • DecodeError: if the input cannot be parsed.
def SerializeToString(self, **kwargs):
1130  def SerializeToString(self, **kwargs):
1131    # Check if the message has all of its required fields set.
1132    if not self.IsInitialized():
1133      raise message_mod.EncodeError(
1134          'Message %s is missing required fields: %s' % (
1135          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
1136    return self.SerializePartialToString(**kwargs)

Serializes the protocol message to a binary string.

Keyword Args:

deterministic (bool): If true, requests deterministic serialization of the protobuf, with predictable ordering of map keys.

Returns:

A binary string representation of the message if all of the required fields in the message are set (i.e. the message is initialized).

Raises:
  • EncodeError: if the message isn't initialized (see IsInitialized()).
def SerializePartialToString(self, **kwargs):
1143  def SerializePartialToString(self, **kwargs):
1144    out = BytesIO()
1145    self._InternalSerialize(out.write, **kwargs)
1146    return out.getvalue()

Serializes the protocol message to a binary string.

This method is similar to SerializeToString but doesn't check if the message is initialized.

Keyword Args:

deterministic (bool): If true, requests deterministic serialization of the protobuf, with predictable ordering of map keys.

Returns:

bytes: A serialized representation of the partial message.

def ListFields(self):
848  def ListFields(self):
849    all_fields = [item for item in self._fields.items() if _IsPresent(item)]
850    all_fields.sort(key = lambda item: item[0].number)
851    return all_fields

Returns a list of (FieldDescriptor, value) tuples for present fields.

A message field is non-empty if HasField() would return true. A singular primitive field is non-empty if HasField() would return true in proto2 or it is non zero in proto3. A repeated field is non-empty if it contains at least one element. The fields are ordered by field number.

Returns:

list[tuple(FieldDescriptor, value)]: field descriptors and values for all fields in the message which are not empty. The values vary by field type.

def HasField(self, field_name):
872  def HasField(self, field_name):
873    try:
874      field = hassable_fields[field_name]
875    except KeyError as exc:
876      raise ValueError('Protocol message %s has no non-repeated field "%s" '
877                       'nor has presence is not available for this field.' % (
878                           message_descriptor.full_name, field_name)) from exc
879
880    if isinstance(field, descriptor_mod.OneofDescriptor):
881      try:
882        return HasField(self, self._oneofs[field].name)
883      except KeyError:
884        return False
885    else:
886      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
887        value = self._fields.get(field)
888        return value is not None and value._is_present_in_parent
889      else:
890        return field in self._fields

Checks if a certain field is set for the message.

For a oneof group, checks if any field inside is set. Note that if the field_name is not defined in the message descriptor, ValueError will be raised.

Arguments:
  • field_name (str): The name of the field to check for presence.
Returns:

bool: Whether a value has been set for the named field.

Raises:
  • ValueError: if the field_name is not a member of this message.
def ClearField(self, field_name):
897  def ClearField(self, field_name):
898    try:
899      field = message_descriptor.fields_by_name[field_name]
900    except KeyError:
901      try:
902        field = message_descriptor.oneofs_by_name[field_name]
903        if field in self._oneofs:
904          field = self._oneofs[field]
905        else:
906          return
907      except KeyError:
908        raise ValueError('Protocol message %s has no "%s" field.' %
909                         (message_descriptor.name, field_name))
910
911    if field in self._fields:
912      # To match the C++ implementation, we need to invalidate iterators
913      # for map fields when ClearField() happens.
914      if hasattr(self._fields[field], 'InvalidateIterators'):
915        self._fields[field].InvalidateIterators()
916
917      # Note:  If the field is a sub-message, its listener will still point
918      #   at us.  That's fine, because the worst than can happen is that it
919      #   will call _Modified() and invalidate our byte size.  Big deal.
920      del self._fields[field]
921
922      if self._oneofs.get(field.containing_oneof, None) is field:
923        del self._oneofs[field.containing_oneof]
924
925    # Always call _Modified() -- even if nothing was changed, this is
926    # a mutating method, and thus calling it should cause the field to become
927    # present in the parent message.
928    self._Modified()

Clears the contents of a given field.

Inside a oneof group, clears the field set. If the name refers to neither a defined field nor a oneof group, ValueError is raised.

Arguments:
  • field_name (str): The name of the field to clear.
Raises:
  • ValueError: if the field_name is not a member of this message.
def WhichOneof(self, oneof_name):
1403  def WhichOneof(self, oneof_name):
1404    """Returns the name of the currently set field inside a oneof, or None."""
1405    try:
1406      field = message_descriptor.oneofs_by_name[oneof_name]
1407    except KeyError:
1408      raise ValueError(
1409          'Protocol message has no oneof "%s" field.' % oneof_name)
1410
1411    nested_field = self._oneofs.get(field, None)
1412    if nested_field is not None and self.HasField(nested_field.name):
1413      return nested_field.name
1414    else:
1415      return None

Returns the name of the currently set field inside a oneof, or None.

def DiscardUnknownFields(self):
1435def _DiscardUnknownFields(self):
1436  self._unknown_fields = []
1437  for field, value in self.ListFields():
1438    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1439      if _IsMapField(field):
1440        if _IsMessageMapField(field):
1441          for key in value:
1442            value[key].DiscardUnknownFields()
1443      elif field.label == _FieldDescriptor.LABEL_REPEATED:
1444        for sub_message in value:
1445          sub_message.DiscardUnknownFields()
1446      else:
1447        value.DiscardUnknownFields()

Clears all fields in the UnknownFieldSet.

This operation is recursive for nested messages.

def ByteSize(self):
1098  def ByteSize(self):
1099    if not self._cached_byte_size_dirty:
1100      return self._cached_byte_size
1101
1102    size = 0
1103    descriptor = self.DESCRIPTOR
1104    if descriptor._is_map_entry:
1105      # Fields of map entry should always be serialized.
1106      key_field = descriptor.fields_by_name['key']
1107      _MaybeAddEncoder(cls, key_field)
1108      size = key_field._sizer(self.key)
1109      value_field = descriptor.fields_by_name['value']
1110      _MaybeAddEncoder(cls, value_field)
1111      size += value_field._sizer(self.value)
1112    else:
1113      for field_descriptor, field_value in self.ListFields():
1114        _MaybeAddEncoder(cls, field_descriptor)
1115        size += field_descriptor._sizer(field_value)
1116      for tag_bytes, value_bytes in self._unknown_fields:
1117        size += len(tag_bytes) + len(value_bytes)
1118
1119    self._cached_byte_size = size
1120    self._cached_byte_size_dirty = False
1121    self._listener_for_children.dirty = False
1122    return size

Returns the serialized size of this message.

Recursively calls ByteSize() on all contained messages.

Returns:

int: The number of bytes required to serialize this message.

def FromString(s):
826  def FromString(s):
827    message = cls()
828    message.MergeFromString(s)
829    return message
PRIMAL_BOUND_FIELD_NUMBER = 1
primal_bound
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for primal_bound.

DUAL_BOUND_FIELD_NUMBER = 2
dual_bound
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for dual_bound.

EXPLORED_NODES_FIELD_NUMBER = 3
explored_nodes
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for explored_nodes.

OPEN_NODES_FIELD_NUMBER = 4
open_nodes
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for open_nodes.

SIMPLEX_ITERATIONS_FIELD_NUMBER = 5
simplex_iterations
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for simplex_iterations.

NUMBER_OF_SOLUTIONS_FOUND_FIELD_NUMBER = 6
number_of_solutions_found
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for number_of_solutions_found.

CUTTING_PLANES_IN_LP_FIELD_NUMBER = 7
cutting_planes_in_lp
711  def getter(self):
712    # TODO: This may be broken since there may not be
713    # default_value.  Combine with has_default_value somehow.
714    return self._fields.get(field, default_value)

Getter for cutting_planes_in_lp.
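A hedged sketch of the classic use of these bounds: stop the solve once the relative gap between primal_bound and dual_bound is small. It assumes the module's CallbackResult dataclass (with a terminate flag), which is defined in this module but not shown in this excerpt; the gap formula and tolerance are illustrative.

    import math

    from ortools.math_opt.python import callback

    def small_gap(stats: callback.MipStats, tolerance: float = 1e-2) -> bool:
        """True if the relative primal/dual bound gap is below tolerance."""
        if not math.isfinite(stats.primal_bound) or not math.isfinite(stats.dual_bound):
            return False
        return abs(stats.primal_bound - stats.dual_bound) <= tolerance * max(
            1.0, abs(stats.primal_bound)
        )

    def stop_on_small_gap(cb_data: callback.CallbackData) -> callback.CallbackResult:
        result = callback.CallbackResult()
        # mip_stats is filled for the MIP, MIP_SOLUTION and MIP_NODE events.
        if cb_data.event == callback.Event.MIP and small_gap(cb_data.mip_stats):
            result.terminate = True  # assumption: CallbackResult exposes a terminate flag
        return result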

def FindInitializationErrors(self):
1302  def FindInitializationErrors(self):
1303    """Finds required fields which are not initialized.
1304
1305    Returns:
1306      A list of strings.  Each string is a path to an uninitialized field from
1307      the top-level message, e.g. "foo.bar[5].baz".
1308    """
1309
1310    errors = []  # simplify things
1311
1312    for field in required_fields:
1313      if not self.HasField(field.name):
1314        errors.append(field.name)
1315
1316    for field, value in self.ListFields():
1317      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
1318        if field.is_extension:
1319          name = '(%s)' % field.full_name
1320        else:
1321          name = field.name
1322
1323        if _IsMapField(field):
1324          if _IsMessageMapField(field):
1325            for key in value:
1326              element = value[key]
1327              prefix = '%s[%s].' % (name, key)
1328              sub_errors = element.FindInitializationErrors()
1329              errors += [prefix + error for error in sub_errors]
1330          else:
1331            # ScalarMaps can't have any initialization errors.
1332            pass
1333        elif field.label == _FieldDescriptor.LABEL_REPEATED:
1334          for i in range(len(value)):
1335            element = value[i]
1336            prefix = '%s[%d].' % (name, i)
1337            sub_errors = element.FindInitializationErrors()
1338            errors += [prefix + error for error in sub_errors]
1339        else:
1340          prefix = name + '.'
1341          sub_errors = value.FindInitializationErrors()
1342          errors += [prefix + error for error in sub_errors]
1343
1344    return errors

Finds required fields which are not initialized.

Returns:

A list of strings. Each string is a path to an uninitialized field from the top-level message, e.g. "foo.bar[5].baz".

Inherited Members
google.protobuf.message.Message
CopyFrom
ParseFromString
HasExtension
ClearExtension
UnknownFields
@dataclasses.dataclass
class CallbackData:
66@dataclasses.dataclass
67class CallbackData:
68    """Input to the solve callback (produced by the solver).
69
70    Attributes:
71      event: The current state of the solver when the callback is run. The event
72        (partially) determines what data is available and what the user is allowed
73        to return.
74      solution: A solution to the primal optimization problem, if available. For
75        Event.MIP_SOLUTION, solution is always present, integral, and feasible.
76        For Event.MIP_NODE, the primal_solution contains the current LP-node
77        relaxation. In some cases, no solution will be available (e.g. because LP
78        was infeasible or the solve was imprecise). Empty for other events.
79      messages: Logs generated by the underlying solver, as a list of strings
80        without new lines (each string is a line). Only filled on Event.MESSAGE.
81      runtime: The time since Solve() was invoked.
82      presolve_stats: Filled for Event.PRESOLVE only.
83      simplex_stats: Filled for Event.SIMPLEX only.
84      barrier_stats: Filled for Event.BARRIER only.
85      mip_stats: Filled for the events MIP, MIP_SOLUTION and MIP_NODE only.
86    """
87
88    event: Event = Event.UNSPECIFIED
89    solution: Optional[Dict[model.Variable, float]] = None
90    messages: List[str] = dataclasses.field(default_factory=list)
91    runtime: datetime.timedelta = datetime.timedelta()
92    presolve_stats: PresolveStats = dataclasses.field(default_factory=PresolveStats)
93    simplex_stats: SimplexStats = dataclasses.field(default_factory=SimplexStats)
94    barrier_stats: BarrierStats = dataclasses.field(default_factory=BarrierStats)
95    mip_stats: MipStats = dataclasses.field(default_factory=MipStats)

Input to the solve callback (produced by the solver).

Attributes:
  • event: The current state of the solver when the callback is run. The event (partially) determines what data is available and what the user is allowed to return.
  • solution: A solution to the primal optimization problem, if available. For Event.MIP_SOLUTION, solution is always present, integral, and feasible. For Event.MIP_NODE, the primal_solution contains the current LP-node relaxation. In some cases, no solution will be available (e.g. because LP was infeasible or the solve was imprecise). Empty for other events.
  • messages: Logs generated by the underlying solver, as a list of strings without new lines (each string is a line). Only filled on Event.MESSAGE.
  • runtime: The time since Solve() was invoked.
  • presolve_stats: Filled for Event.PRESOLVE only.
  • simplex_stats: Filled for Event.SIMPLEX only.
  • barrier_stats: Filled for Event.BARRIER only.
  • mip_stats: Filled for the events MIP, MIP_SOLUTION and MIP_NODE only.
CallbackData( event: Event = <Event.UNSPECIFIED: 0>, solution: Optional[Dict[ortools.math_opt.python.model.Variable, float]] = None, messages: List[str] = <factory>, runtime: datetime.timedelta = datetime.timedelta(0), presolve_stats: PresolveStats = <factory>, simplex_stats: SimplexStats = <factory>, barrier_stats: BarrierStats = <factory>, mip_stats: MipStats = <factory>)
event: Event = <Event.UNSPECIFIED: 0>
solution: Optional[Dict[ortools.math_opt.python.model.Variable, float]] = None
messages: List[str]
runtime: datetime.timedelta = datetime.timedelta(0)
presolve_stats: PresolveStats
simplex_stats: SimplexStats
barrier_stats: BarrierStats
mip_stats: MipStats
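For instance, a sketch of a handler that inspects a new incumbent (function and variable names are illustrative; solution maps Variable objects to values only at MIP_SOLUTION and MIP_NODE events):

    from ortools.math_opt.python import callback, model

    def on_new_incumbent(cb_data: callback.CallbackData, x: model.Variable) -> None:
        if cb_data.event == callback.Event.MIP_SOLUTION and cb_data.solution is not None:
            elapsed = cb_data.runtime.total_seconds()
            print(f"new incumbent: x={cb_data.solution[x]} after {elapsed:.1f}s")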
 98def parse_callback_data(
 99    cb_data: callback_pb2.CallbackDataProto, mod: model.Model
100) -> CallbackData:
101    """Creates a CallbackData from an equivalent proto.
102
103    Args:
104      cb_data: A protocol buffer with the information the user needs for a
105        callback.
106      mod: The model being solved.
107
108    Returns:
109      An equivalent CallbackData.
110
111    Raises:
112      ValueError: if cb_data is invalid or inconsistent with mod, e.g. cb_data
113      refers to a variable id not in mod.
114    """
115    result = CallbackData()
116    result.event = Event(cb_data.event)
117    if cb_data.HasField("primal_solution_vector"):
118        primal_solution = cb_data.primal_solution_vector
119        result.solution = {
120            mod.get_variable(id): val
121            for (id, val) in zip(primal_solution.ids, primal_solution.values)
122        }
123    result.runtime = cb_data.runtime.ToTimedelta()
124    result.presolve_stats = cb_data.presolve_stats
125    result.simplex_stats = cb_data.simplex_stats
126    result.barrier_stats = cb_data.barrier_stats
127    result.mip_stats = cb_data.mip_stats
128    return result

Creates a CallbackData from an equivalent proto.

Arguments:
  • cb_data: A protocol buffer with the information the user needs for a callback.
  • mod: The model being solved.
Returns:

An equivalent CallbackData.

Raises:
  • ValueError: if cb_data is invalid or inconsistent with mod, e.g. cb_data refers to a variable id not in mod.
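A sketch of the conversion (assuming Model.add_variable and Variable.id from ortools.math_opt.python.model, and that runtime is a protobuf Duration, as suggested by the ToTimedelta() call above):

    from ortools.math_opt import callback_pb2
    from ortools.math_opt.python import callback, model

    mod = model.Model(name="example")
    x = mod.add_variable(lb=0.0, ub=1.0, is_integer=True, name="x")

    proto = callback_pb2.CallbackDataProto()
    proto.event = callback_pb2.CALLBACK_EVENT_MIP_SOLUTION
    proto.primal_solution_vector.ids.append(x.id)
    proto.primal_solution_vector.values.append(1.0)
    proto.runtime.FromMilliseconds(1500)

    data = callback.parse_callback_data(proto, mod)
    assert data.event == callback.Event.MIP_SOLUTION
    assert data.solution == {x: 1.0}
    assert data.runtime.total_seconds() == 1.5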
@dataclasses.dataclass
class CallbackRegistration:
131@dataclasses.dataclass
132class CallbackRegistration:
133    """Request the events and input data and reports output types for a callback.
134
135    Note that it is an error to add a constraint in a callback without setting
136    add_cuts and/or add_lazy_constraints to true.
137
138    Attributes:
139      events: When the callback should be invoked, by default, never. If an
140        unsupported event for a solver/model combination is selected, an
141        excecption is raised, see Event above for details.
142      mip_solution_filter: restricts the variable values returned in
143        CallbackData.solution (the callback argument) at each MIP_SOLUTION event.
144        By default, values are returned for all variables.
145      mip_node_filter: restricts the variable values returned in
146        CallbackData.solution (the callback argument) at each MIP_NODE event. By
147        default, values are returned for all variables.
148      add_cuts: The callback may add "user cuts" (linear constraints that
149        strengthen the LP without cutting of integer points) at MIP_NODE events.
150      add_lazy_constraints: The callback may add "lazy constraints" (linear
151        constraints that cut off integer solutions) at MIP_NODE or MIP_SOLUTION
152        events.
153    """
154
155    events: Set[Event] = dataclasses.field(default_factory=set)
156    mip_solution_filter: sparse_containers.VariableFilter = (
157        sparse_containers.VariableFilter()
158    )
159    mip_node_filter: sparse_containers.VariableFilter = (
160        sparse_containers.VariableFilter()
161    )
162    add_cuts: bool = False
163    add_lazy_constraints: bool = False
164
165    def to_proto(self) -> callback_pb2.CallbackRegistrationProto:
166        """Returns an equivalent proto to this CallbackRegistration."""
167        result = callback_pb2.CallbackRegistrationProto()
168        result.request_registration[:] = sorted([event.value for event in self.events])
169        result.mip_solution_filter.CopyFrom(self.mip_solution_filter.to_proto())
170        result.mip_node_filter.CopyFrom(self.mip_node_filter.to_proto())
171        result.add_cuts = self.add_cuts
172        result.add_lazy_constraints = self.add_lazy_constraints
173        return result

Request the events and input data and reports output types for a callback.

Note that it is an error to add a constraint in a callback without setting add_cuts and/or add_lazy_constraints to true.

Attributes:
  • events: When the callback should be invoked, by default, never. If an unsupported event for a solver/model combination is selected, an exception is raised; see Event above for details.
  • mip_solution_filter: restricts the variable values returned in CallbackData.solution (the callback argument) at each MIP_SOLUTION event. By default, values are returned for all variables.
  • mip_node_filter: restricts the variable values returned in CallbackData.solution (the callback argument) at each MIP_NODE event. By default, values are returned for all variables.
  • add_cuts: The callback may add "user cuts" (linear constraints that strengthen the LP without cutting off integer points) at MIP_NODE events.
  • add_lazy_constraints: The callback may add "lazy constraints" (linear constraints that cut off integer solutions) at MIP_NODE or MIP_SOLUTION events.
events: Set[Event]
add_cuts: bool = False
add_lazy_constraints: bool = False
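For example, a registration that asks for MIP_SOLUTION events and allows the callback to add lazy constraints (the default filters report values for all variables) could look like the following sketch:

    from ortools.math_opt.python import callback

    registration = callback.CallbackRegistration(
        events={callback.Event.MIP_SOLUTION},
        add_lazy_constraints=True,
    )
    # The registration is passed to the solve call together with the callback;
    # to_proto() shows the equivalent wire format.
    registration_proto = registration.to_proto()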
def to_proto(self) -> ortools.math_opt.callback_pb2.CallbackRegistrationProto:
165    def to_proto(self) -> callback_pb2.CallbackRegistrationProto:
166        """Returns an equivalent proto to this CallbackRegistration."""
167        result = callback_pb2.CallbackRegistrationProto()
168        result.request_registration[:] = sorted([event.value for event in self.events])
169        result.mip_solution_filter.CopyFrom(self.mip_solution_filter.to_proto())
170        result.mip_node_filter.CopyFrom(self.mip_node_filter.to_proto())
171        result.add_cuts = self.add_cuts
172        result.add_lazy_constraints = self.add_lazy_constraints
173        return result

Returns an equivalent proto to this CallbackRegistration.

@dataclasses.dataclass
class GeneratedConstraint:
176@dataclasses.dataclass
177class GeneratedConstraint:
178    """A linear constraint to add inside a callback.
179
180    Models a constraint of the form:
181      lb <= sum_{i in I} a_i * x_i <= ub
182
183    Two types of generated linear constraints are supported based on is_lazy:
184      * The "lazy constraint" can remove integer points from the feasible
185        region and can be added at event Event.MIP_NODE or
186        Event.MIP_SOLUTION
187      * The "user cut" (on is_lazy=false) strengthens the LP without removing
188        integer points. It can only be added at Event.MIP_NODE.
189
190
191    Attributes:
192      terms: The variables and linear coefficients in the constraint, a_i and x_i
193        in the model above.
194      lower_bound: lb in the model above.
195      upper_bound: ub in the model above.
196      is_lazy: Indicates if the constraint should be interpreted as a "lazy
197        constraint" (cuts off integer solutions) or a "user cut" (strengthens the
198        LP relaxation without cutting off integer solutions).
199    """
200
201    terms: Mapping[model.Variable, float] = dataclasses.field(default_factory=dict)
202    lower_bound: float = -math.inf
203    upper_bound: float = math.inf
204    is_lazy: bool = False
205
206    def to_proto(
207        self,
208    ) -> callback_pb2.CallbackResultProto.GeneratedLinearConstraint:
209        """Returns an equivalent proto for the constraint."""
210        result = callback_pb2.CallbackResultProto.GeneratedLinearConstraint()
211        result.is_lazy = self.is_lazy
212        result.lower_bound = self.lower_bound
213        result.upper_bound = self.upper_bound
214        result.linear_expression.CopyFrom(
215            sparse_containers.to_sparse_double_vector_proto(self.terms)
216        )
217        return result

A linear constraint to add inside a callback.

Models a constraint of the form:

lb <= sum_{i in I} a_i * x_i <= ub

Two types of generated linear constraints are supported based on is_lazy:

  • The "lazy constraint" can remove integer points from the feasible region and can be added at event Event.MIP_NODE or Event.MIP_SOLUTION
  • The "user cut" (on is_lazy=false) strengthens the LP without removing integer points. It can only be added at Event.MIP_NODE.
Attributes:
  • terms: The variables and linear coefficients in the constraint, a_i and x_i in the model above.
  • lower_bound: lb in the model above.
  • upper_bound: ub in the model above.
  • is_lazy: Indicates if the constraint should be interpreted as a "lazy constraint" (cuts off integer solutions) or a "user cut" (strengthens the LP relaxation without cutting off integer solutions).
GeneratedConstraint( terms: Mapping[ortools.math_opt.python.model.Variable, float] = <factory>, lower_bound: float = -inf, upper_bound: float = inf, is_lazy: bool = False)
lower_bound: float = -inf
upper_bound: float = inf
is_lazy: bool = False
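A sketch of building the lazy constraint x + y <= 1 directly as a GeneratedConstraint (the binary variables are illustrative); inside a callback, CallbackResult.add_lazy_constraint or add_user_cut is usually more convenient:

    from ortools.math_opt.python import callback, model

    mod = model.Model()
    x = mod.add_binary_variable(name="x")
    y = mod.add_binary_variable(name="y")

    con = callback.GeneratedConstraint(
        terms={x: 1.0, y: 1.0},
        upper_bound=1.0,  # lower_bound keeps its default of -inf
        is_lazy=True,
    )
    con_proto = con.to_proto()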
def to_proto(self) -> ortools.math_opt.callback_pb2.CallbackResultProto.GeneratedLinearConstraint:
206    def to_proto(
207        self,
208    ) -> callback_pb2.CallbackResultProto.GeneratedLinearConstraint:
209        """Returns an equivalent proto for the constraint."""
210        result = callback_pb2.CallbackResultProto.GeneratedLinearConstraint()
211        result.is_lazy = self.is_lazy
212        result.lower_bound = self.lower_bound
213        result.upper_bound = self.upper_bound
214        result.linear_expression.CopyFrom(
215            sparse_containers.to_sparse_double_vector_proto(self.terms)
216        )
217        return result

Returns an equivalent proto for the constraint.

@dataclasses.dataclass
class CallbackResult:
220@dataclasses.dataclass
221class CallbackResult:
222    """The value returned by a solve callback (produced by the user).
223
224    Attributes:
225      terminate: Stop the solve process and return early. Can be called from any
226        event.
227      generated_constraints: Constraints to add to the model. For details, see
228        GeneratedConstraint documentation.
229      suggested_solutions: A list of solutions (or partially defined solutions) to
230        suggest to the solver. Some solvers (e.g. Gurobi) will try to convert a
231        partial solution into a full solution by solving a MIP. Use only for
232        Event.MIP_NODE.
233    """
234
235    terminate: bool = False
236    generated_constraints: List[GeneratedConstraint] = dataclasses.field(
237        default_factory=list
238    )
239    suggested_solutions: List[Mapping[model.Variable, float]] = dataclasses.field(
240        default_factory=list
241    )
242
243    def add_generated_constraint(
244        self,
245        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
246        *,
247        lb: Optional[float] = None,
248        ub: Optional[float] = None,
249        expr: Optional[model.LinearTypes] = None,
250        is_lazy: bool,
251    ) -> None:
252        """Adds a linear constraint to the list of generated constraints.
253
254        The constraint can be of two exclusive types: a "lazy constraint" or a
255        "user cut. A "user cut" is a constraint that excludes the current LP
256        solution, but does not cut off any integer-feasible points that satisfy the
257        already added constraints (either in callbacks or through
258        Model.add_linear_constraint()). A "lazy constraint" is a constraint that
259        excludes such integer-feasible points and hence is needed for correctness of
260        the formulation.
261
262        The simplest way to specify the constraint is by passing a one-sided or
263        two-sided linear inequality as in:
264          * add_generated_constraint(x + y + 1.0 <= 2.0, is_lazy=True),
265          * add_generated_constraint(x + y >= 2.0, is_lazy=True), or
266          * add_generated_constraint((1.0 <= x + y) <= 2.0, is_lazy=True).
267
268        Note the extra parentheses for two-sided linear inequalities, which are
269        required due to some language limitations (see
270        https://peps.python.org/pep-0335/ and https://peps.python.org/pep-0535/).
271        If the parentheses are omitted, a TypeError will be raised explaining the
272        issue (if this error was not raised the first inequality would have been
273        silently ignored because of the noted language limitations).
274
275        The second way to specify the constraint is by setting lb, ub, and/or expr as
276        in:
277          * add_generated_constraint(expr=x + y + 1.0, ub=2.0, is_lazy=True),
278          * add_generated_constraint(expr=x + y, lb=2.0, is_lazy=True),
279          * add_generated_constraint(expr=x + y, lb=1.0, ub=2.0, is_lazy=True), or
280          * add_generated_constraint(lb=1.0, is_lazy=True).
281        Omitting lb is equivalent to setting it to -math.inf and omitting ub is
282        equivalent to setting it to math.inf.
283
284        These two alternatives are exclusive and a combined call like:
285          * add_generated_constraint(x + y <= 2.0, lb=1.0, is_lazy=True), or
286          * add_generated_constraint(x + y <= 2.0, ub=math.inf, is_lazy=True)
287        will raise a ValueError. A ValueError is also raised if expr's offset is
288        infinite.
289
290        Args:
291          bounded_expr: a linear inequality describing the constraint. Cannot be
292            specified together with lb, ub, or expr.
293          lb: The constraint's lower bound if bounded_expr is omitted (if both
294            bounded_expr and lb are omitted, the lower bound is -math.inf).
295          ub: The constraint's upper bound if bounded_expr is omitted (if both
296            bounded_expr and ub are omitted, the upper bound is math.inf).
297          expr: The constraint's linear expression if bounded_expr is omitted.
298          is_lazy: Whether the constraint is lazy or not.
299        """
300        normalized_inequality = model.as_normalized_linear_inequality(
301            bounded_expr, lb=lb, ub=ub, expr=expr
302        )
303        self.generated_constraints.append(
304            GeneratedConstraint(
305                lower_bound=normalized_inequality.lb,
306                terms=normalized_inequality.coefficients,
307                upper_bound=normalized_inequality.ub,
308                is_lazy=is_lazy,
309            )
310        )
311
312    def add_lazy_constraint(
313        self,
314        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
315        *,
316        lb: Optional[float] = None,
317        ub: Optional[float] = None,
318        expr: Optional[model.LinearTypes] = None,
319    ) -> None:
320        """Shortcut for add_generated_constraint(..., is_lazy=True).."""
321        self.add_generated_constraint(
322            bounded_expr, lb=lb, ub=ub, expr=expr, is_lazy=True
323        )
324
325    def add_user_cut(
326        self,
327        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
328        *,
329        lb: Optional[float] = None,
330        ub: Optional[float] = None,
331        expr: Optional[model.LinearTypes] = None,
332    ) -> None:
333        """Shortcut for add_generated_constraint(..., is_lazy=False)."""
334        self.add_generated_constraint(
335            bounded_expr, lb=lb, ub=ub, expr=expr, is_lazy=False
336        )
337
338    def to_proto(self) -> callback_pb2.CallbackResultProto:
339        """Returns a proto equivalent to this CallbackResult."""
340        result = callback_pb2.CallbackResultProto(terminate=self.terminate)
341        for generated_constraint in self.generated_constraints:
342            result.cuts.add().CopyFrom(generated_constraint.to_proto())
343        for suggested_solution in self.suggested_solutions:
344            result.suggested_solutions.add().CopyFrom(
345                sparse_containers.to_sparse_double_vector_proto(suggested_solution)
346            )
347        return result

The value returned by a solve callback (produced by the user).

Attributes:
  • terminate: Stop the solve process and return early. Can be called from any event.
  • generated_constraints: Constraints to add to the model. For details, see GeneratedConstraint documentation.
  • suggested_solutions: A list of solutions (or partially defined solutions) to suggest to the solver. Some solvers (e.g. Gurobi) will try to convert a partial solution into a full solution by solving a MIP. Use only for Event.MIP_NODE.
CallbackResult( terminate: bool = False, generated_constraints: List[GeneratedConstraint] = <factory>, suggested_solutions: List[Mapping[ortools.math_opt.python.model.Variable, float]] = <factory>)
terminate: bool = False
generated_constraints: List[GeneratedConstraint]
suggested_solutions: List[Mapping[ortools.math_opt.python.model.Variable, float]]
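A sketch of a callback that returns a CallbackResult: it terminates the solve after an illustrative 30-second budget and, at MIP_SOLUTION events, cuts off incumbents violating x + y <= 1 with a lazy constraint. This assumes a matching CallbackRegistration with Event.MIP_SOLUTION and add_lazy_constraints=True, plus illustrative binary variables x and y:

    import datetime

    from ortools.math_opt.python import callback, model

    mod = model.Model()
    x = mod.add_binary_variable(name="x")
    y = mod.add_binary_variable(name="y")

    def my_callback(data: callback.CallbackData) -> callback.CallbackResult:
        result = callback.CallbackResult()
        if data.runtime > datetime.timedelta(seconds=30):
            # Ask the solver to stop and return early.
            result.terminate = True
        if data.event == callback.Event.MIP_SOLUTION and data.solution is not None:
            if data.solution[x] + data.solution[y] > 1.0:
                result.add_lazy_constraint(x + y <= 1.0)
        return result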
def add_generated_constraint( self, bounded_expr: Union[bool, ortools.math_opt.python.model.LowerBoundedLinearExpression, ortools.math_opt.python.model.UpperBoundedLinearExpression, ortools.math_opt.python.model.BoundedLinearExpression, ortools.math_opt.python.model.VarEqVar, NoneType] = None, *, lb: Optional[float] = None, ub: Optional[float] = None, expr: Union[int, float, ForwardRef('LinearBase'), NoneType] = None, is_lazy: bool) -> None:
243    def add_generated_constraint(
244        self,
245        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
246        *,
247        lb: Optional[float] = None,
248        ub: Optional[float] = None,
249        expr: Optional[model.LinearTypes] = None,
250        is_lazy: bool,
251    ) -> None:
252        """Adds a linear constraint to the list of generated constraints.
253
254        The constraint can be of two exclusive types: a "lazy constraint" or a
255        "user cut. A "user cut" is a constraint that excludes the current LP
256        solution, but does not cut off any integer-feasible points that satisfy the
257        already added constraints (either in callbacks or through
258        Model.add_linear_constraint()). A "lazy constraint" is a constraint that
259        excludes such integer-feasible points and hence is needed for correctness of
260        the formulation.
261
262        The simplest way to specify the constraint is by passing a one-sided or
263        two-sided linear inequality as in:
264          * add_generated_constraint(x + y + 1.0 <= 2.0, is_lazy=True),
265          * add_generated_constraint(x + y >= 2.0, is_lazy=True), or
266          * add_generated_constraint((1.0 <= x + y) <= 2.0, is_lazy=True).
267
268        Note the extra parentheses for two-sided linear inequalities, which are
269        required due to some language limitations (see
270        https://peps.python.org/pep-0335/ and https://peps.python.org/pep-0535/).
271        If the parentheses are omitted, a TypeError will be raised explaining the
272        issue (if this error was not raised the first inequality would have been
273        silently ignored because of the noted language limitations).
274
275        The second way to specify the constraint is by setting lb, ub, and/or expr as
276        in:
277          * add_generated_constraint(expr=x + y + 1.0, ub=2.0, is_lazy=True),
278          * add_generated_constraint(expr=x + y, lb=2.0, is_lazy=True),
279          * add_generated_constraint(expr=x + y, lb=1.0, ub=2.0, is_lazy=True), or
280          * add_generated_constraint(lb=1.0, is_lazy=True).
281        Omitting lb is equivalent to setting it to -math.inf and omitting ub is
282        equivalent to setting it to math.inf.
283
284        These two alternatives are exclusive and a combined call like:
285          * add_generated_constraint(x + y <= 2.0, lb=1.0, is_lazy=True), or
286          * add_generated_constraint(x + y <= 2.0, ub=math.inf, is_lazy=True)
287        will raise a ValueError. A ValueError is also raised if expr's offset is
288        infinite.
289
290        Args:
291          bounded_expr: a linear inequality describing the constraint. Cannot be
292            specified together with lb, ub, or expr.
293          lb: The constraint's lower bound if bounded_expr is omitted (if both
294            bounded_expr and lb are omitted, the lower bound is -math.inf).
295          ub: The constraint's upper bound if bounded_expr is omitted (if both
296            bounded_expr and ub are omitted, the upper bound is math.inf).
297          expr: The constraint's linear expression if bounded_expr is omitted.
298          is_lazy: Whether the constraint is lazy or not.
299        """
300        normalized_inequality = model.as_normalized_linear_inequality(
301            bounded_expr, lb=lb, ub=ub, expr=expr
302        )
303        self.generated_constraints.append(
304            GeneratedConstraint(
305                lower_bound=normalized_inequality.lb,
306                terms=normalized_inequality.coefficients,
307                upper_bound=normalized_inequality.ub,
308                is_lazy=is_lazy,
309            )
310        )

Adds a linear constraint to the list of generated constraints.

The constraint can be of two exclusive types: a "lazy constraint" or a "user cut". A "user cut" is a constraint that excludes the current LP solution, but does not cut off any integer-feasible points that satisfy the already added constraints (either in callbacks or through Model.add_linear_constraint()). A "lazy constraint" is a constraint that excludes such integer-feasible points and hence is needed for correctness of the formulation.

The simplest way to specify the constraint is by passing a one-sided or two-sided linear inequality as in:

  • add_generated_constraint(x + y + 1.0 <= 2.0, is_lazy=True),
  • add_generated_constraint(x + y >= 2.0, is_lazy=True), or
  • add_generated_constraint((1.0 <= x + y) <= 2.0, is_lazy=True).

Note the extra parentheses for two-sided linear inequalities, which are required due to some language limitations (see https://peps.python.org/pep-0335/ and https://peps.python.org/pep-0535/). If the parentheses are omitted, a TypeError will be raised explaining the issue (if this error was not raised the first inequality would have been silently ignored because of the noted language limitations).

The second way to specify the constraint is by setting lb, ub, and/or expr as in:

  • add_generated_constraint(expr=x + y + 1.0, ub=2.0, is_lazy=True),
  • add_generated_constraint(expr=x + y, lb=2.0, is_lazy=True),
  • add_generated_constraint(expr=x + y, lb=1.0, ub=2.0, is_lazy=True), or
  • add_generated_constraint(lb=1.0, is_lazy=True). Omitting lb is equivalent to setting it to -math.inf and omitting ub is equivalent to setting it to math.inf.
These two alternatives are exclusive and a combined call like:
  • add_generated_constraint(x + y <= 2.0, lb=1.0, is_lazy=True), or
  • add_generated_constraint(x + y <= 2.0, ub=math.inf, is_lazy=True)

will raise a ValueError. A ValueError is also raised if expr's offset is infinite.

Arguments:
  • bounded_expr: a linear inequality describing the constraint. Cannot be specified together with lb, ub, or expr.
  • lb: The constraint's lower bound if bounded_expr is omitted (if both bounded_expr and lb are omitted, the lower bound is -math.inf).
  • ub: The constraint's upper bound if bounded_expr is omitted (if both bounded_expr and ub are omitted, the upper bound is math.inf).
  • expr: The constraint's linear expression if bounded_expr is omitted.
  • is_lazy: Whether the constraint is lazy or not.
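As a quick illustration, the two calling conventions above are interchangeable; both calls below add the same lazy constraint 1 <= x + y <= 2 (x and y are illustrative variables of some model):

    from ortools.math_opt.python import callback, model

    mod = model.Model()
    x = mod.add_binary_variable(name="x")
    y = mod.add_binary_variable(name="y")

    result = callback.CallbackResult()
    result.add_generated_constraint((1.0 <= x + y) <= 2.0, is_lazy=True)
    result.add_generated_constraint(expr=x + y, lb=1.0, ub=2.0, is_lazy=True)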
def add_lazy_constraint( self, bounded_expr: Union[bool, ortools.math_opt.python.model.LowerBoundedLinearExpression, ortools.math_opt.python.model.UpperBoundedLinearExpression, ortools.math_opt.python.model.BoundedLinearExpression, ortools.math_opt.python.model.VarEqVar, NoneType] = None, *, lb: Optional[float] = None, ub: Optional[float] = None, expr: Union[int, float, ForwardRef('LinearBase'), NoneType] = None) -> None:
312    def add_lazy_constraint(
313        self,
314        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
315        *,
316        lb: Optional[float] = None,
317        ub: Optional[float] = None,
318        expr: Optional[model.LinearTypes] = None,
319    ) -> None:
320        """Shortcut for add_generated_constraint(..., is_lazy=True).."""
321        self.add_generated_constraint(
322            bounded_expr, lb=lb, ub=ub, expr=expr, is_lazy=True
323        )

Shortcut for add_generated_constraint(..., is_lazy=True).

def add_user_cut( self, bounded_expr: Union[bool, ortools.math_opt.python.model.LowerBoundedLinearExpression, ortools.math_opt.python.model.UpperBoundedLinearExpression, ortools.math_opt.python.model.BoundedLinearExpression, ortools.math_opt.python.model.VarEqVar, NoneType] = None, *, lb: Optional[float] = None, ub: Optional[float] = None, expr: Union[int, float, ForwardRef('LinearBase'), NoneType] = None) -> None:
325    def add_user_cut(
326        self,
327        bounded_expr: Optional[Union[bool, model.BoundedLinearTypes]] = None,
328        *,
329        lb: Optional[float] = None,
330        ub: Optional[float] = None,
331        expr: Optional[model.LinearTypes] = None,
332    ) -> None:
333        """Shortcut for add_generated_constraint(..., is_lazy=False)."""
334        self.add_generated_constraint(
335            bounded_expr, lb=lb, ub=ub, expr=expr, is_lazy=False
336        )

Shortcut for add_generated_constraint(..., is_lazy=False).

def to_proto(self) -> ortools.math_opt.callback_pb2.CallbackResultProto:
338    def to_proto(self) -> callback_pb2.CallbackResultProto:
339        """Returns a proto equivalent to this CallbackResult."""
340        result = callback_pb2.CallbackResultProto(terminate=self.terminate)
341        for generated_constraint in self.generated_constraints:
342            result.cuts.add().CopyFrom(generated_constraint.to_proto())
343        for suggested_solution in self.suggested_solutions:
344            result.suggested_solutions.add().CopyFrom(
345                sparse_containers.to_sparse_double_vector_proto(suggested_solution)
346            )
347        return result

Returns a proto equivalent to this CallbackResult.