diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8146174ae5..f216bcc7e0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,19 +1,36 @@
 # Changelog
 
 ## Next Release
-* Added support for ONNX networks with tanh nodes to command-line interface
-* Added proof producing versions of Sign, Max, Absolute Value and Disjunction constraints
-* Added support for properties provided in VNN-LIB format for ONNX networks via the Python API
-* Fixed bugs when parsing Sigmoid layers in the C++ ONNX parser.
-* Supported additional non-linear constraints Softmax and Bilinear
-* Removed dependency on torch and drop support for Python3.7
-* Bumped ONNX version to >= 1.15.0
-* Added support for Leaky ReLU
-* Added support for Pythonic API
-* Command-line ONNX parser now supports networks with multiple outputs.
-* Command-line ONNX parser now supports the following operators: Cast, Squeeze, LeakyRelu.
-* Errors now are printed on `stderr` rather than `stdout`
-* Not reindexing output variables to immediately follow after input variables in the MarabouNetworkONNX class
+
+* Dependency changes:
+  - Dropped support for Python 3.7.
+  - Now uses ONNX 1.15.0 (up from 1.12.0) in both the C++ and Python backends.
+  - The class `MarabouNetworkONNX` no longer depends on `torch` in the Python backend.
+
+* Marabou now prints errors on `stderr` rather than `stdout`.
+
+* Added proof-producing versions of the `Sign`, `Max`, `Absolute Value` and `Disjunction` constraints.
+
+* Changes to command-line ONNX support:
+  - Fixed a bug where variable lower bounds were not set correctly.
+  - Fixed a bug where sigmoid operators were not parsed correctly.
+  - Added support for the `Tanh`, `Squeeze`, `LeakyRelu` and `Cast` operators.
+  - Added support for networks with multiple outputs.
+
+* Added command-line support for properties in the VNNLIB format.
+
+* Changes to Python ONNX support:
+  - Added support for the `Softmax`, `Bilinear` and `LeakyRelu` operators.
+  - `MarabouNetworkONNX` no longer exposes the fields `madeGraphEquations`, `varMap`, `constantMap` and `shapeMap`,
+    as these were internal implementation details.
+  - `MarabouNetworkONNX` no longer has a `shallowCopy` method. Instead of calling this method,
+    set the new parameter `preserveExistingConstraints` of the method `readONNX` to
+    `True`, which has the same effect.
+  - The method `getMarabouQuery` on `MarabouNetwork` has been renamed to `getInputQuery`.
+
+* Added support for creating constraints using the overloaded operators `<=`, `==`, etc., in
+  the Python backend. See `maraboupy/examples/7_PythonicAPI.py` for details.
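As a quick illustration of that last item, a minimal sketch of the overloaded-constraint syntax. The `Var` wrapper and operator overloads follow `maraboupy/examples/7_PythonicAPI.py`; the model path and variable indices are placeholders:

```python
from maraboupy import Marabou
from maraboupy.MarabouPythonic import *

# Placeholder model path; any ONNX network parsed by maraboupy works here.
network = Marabou.read_onnx("model.onnx")

# Wrap Marabou variable indices (assumed Var API from MarabouPythonic).
v0 = Var(0)
v1 = Var(1)

# The overloaded comparison operators build VarConstraint objects, which
# addConstraint translates into bounds and (in)equalities.
network.addConstraint(v0 <= 0.5)           # upper bound on a single variable
network.addConstraint(2 * v0 + v1 == 1.0)  # linear equality over a combination

exitCode, vals, stats = network.solve()
```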
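Likewise, a minimal migration sketch for the renamed query method and the removed `shallowCopy`, again assuming a placeholder file `model.onnx`:

```python
from maraboupy import Marabou

network = Marabou.read_onnx("model.onnx")

# Renamed: getMarabouQuery() -> getInputQuery().
ipq = network.getInputQuery()

# Instead of the removed shallowCopy(), re-read the network with
# preserveExistingConstraints=True to keep the constraints collected so far
# (e.g. when unrolling system dynamics across several copies of the network).
network.readONNX("model.onnx", reindexOutputVars=False,
                 preserveExistingConstraints=True)
```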
## Version 1.0.0 + * Initial versioned release diff --git a/maraboupy/Marabou.py b/maraboupy/Marabou.py index 7038924b33..017a48e22b 100644 --- a/maraboupy/Marabou.py +++ b/maraboupy/Marabou.py @@ -28,7 +28,7 @@ try: from maraboupy.MarabouNetworkONNX import * except ImportError: - warnings.warn("ONNX parser is unavailable because onnx or onnxruntime or torch packages are not installed") + warnings.warn("ONNX parser is unavailable because onnx or onnxruntime packages are not installed") def read_nnet(filename, normalize=False): """Constructs a MarabouNetworkNnet object from a .nnet file diff --git a/maraboupy/MarabouNetwork.py b/maraboupy/MarabouNetwork.py index 47374619ba..2c6a704220 100644 --- a/maraboupy/MarabouNetwork.py +++ b/maraboupy/MarabouNetwork.py @@ -17,12 +17,11 @@ ''' from maraboupy import MarabouCore -from maraboupy import MarabouUtils -from maraboupy.MarabouPythonic import * +from maraboupy.parsers.InputQueryBuilder import InputQueryBuilder import numpy as np -class MarabouNetwork: +class MarabouNetwork(InputQueryBuilder): """Abstract class representing general Marabou network Attributes: @@ -40,29 +39,11 @@ class MarabouNetwork: outputVars (list of numpy arrays): Output variables """ def __init__(self): - """Constructs a MarabouNetwork object and calls function to initialize """ - self.clear() - - def clear(self): - """Reset values to represent empty network + Constructs a MarabouNetwork object and calls function to initialize """ - self.numVars = 0 - self.equList = [] - self.additionalEquList = [] # used to store user defined equations - self.reluList = [] - self.leakyReluList = [] - self.sigmoidList = [] - self.maxList = [] - self.softmaxList = [] - self.bilinearList = [] - self.absList = [] - self.signList = [] - self.disjunctionList = [] - self.lowerBounds = dict() - self.upperBounds = dict() - self.inputVars = [] - self.outputVars = [] + super().__init__() + self.clear() def clearProperty(self): """Clear the lower bounds and upper bounds map, and the self.additionEquList @@ -71,279 +52,6 @@ def clearProperty(self): self.upperBounds.clear() self.additionalEquList.clear() - def getNewVariable(self): - """Function to create a new variable - - Returns: - (int): New variable number - - :meta private: - """ - self.numVars += 1 - return self.numVars - 1 - - def addEquation(self, x, isProperty=False): - """Function to add new equation to the network - - Args: - x (:class:`~maraboupy.MarabouUtils.Equation`): New equation to add - isProperty (bool): If true, this constraint can be removed later by clearProperty() method - """ - if isProperty: - self.additionalEquList += [x] - else: - self.equList += [x] - - def setLowerBound(self, x, v): - """Function to set lower bound for variable - - Args: - x (int): Variable number to set - v (float): Value representing lower bound - """ - self.lowerBounds[x]=v - - def setUpperBound(self, x, v): - """Function to set upper bound for variable - - Args: - x (int): Variable number to set - v (float): Value representing upper bound - """ - self.upperBounds[x]=v - - def addRelu(self, v1, v2): - """Function to add a new Relu constraint - - Args: - v1 (int): Variable representing input of Relu - v2 (int): Variable representing output of Relu - """ - self.reluList += [(v1, v2)] - - def addLeakyRelu(self, v1, v2, slope): - """Function to add a new Leaky Relu constraint - - Args: - v1 (int): Variable representing input of Leaky Relu - v2 (int): Variable representing output of Leaky Relu - slope (float): Slope of the Leaky ReLU - """ - 
self.leakyReluList += [(v1, v2, slope)] - - def addBilinear(self, v1, v2, v3): - """Function to add a bilinear constraint to the network - Args: - v1 (int): Variable representing input1 of Bilinear - v2 (int): Variable representing input2 of Bilinear - v3 (int): Variable representing output of Bilinear - """ - self.bilinearList += [(v1, v2, v3)] - - def addSigmoid(self, v1, v2): - """Function to add a new Sigmoid constraint - - Args: - v1 (int): Variable representing input of Sigmoid - v2 (int): Variable representing output of Sigmoid - """ - self.sigmoidList += [(v1, v2)] - - def addMaxConstraint(self, elements, v): - """Function to add a new Max constraint - - Args: - elements (set of int): Variable representing input to max constraint - v (int): Variable representing output of max constraint - """ - self.maxList += [(elements, v)] - - def addSoftmaxConstraint(self, inputs, outputs): - """Function to add a new softmax constraint - - Args: - inputs (set of int): Variable representing input to max constraint - outputs (set of int): Variables representing outputs of max constraint - """ - self.softmaxList += [(inputs, outputs)] - - def addAbsConstraint(self, b, f): - """Function to add a new Abs constraint - - Args: - b (int): Variable representing input of the Abs constraint - f (int): Variable representing output of the Abs constraint - """ - self.absList += [(b, f)] - - def addSignConstraint(self, b, f): - """Function to add a new Sign constraint - - Args: - b (int): Variable representing input of Sign - f (int): Variable representing output of Sign - """ - self.signList += [(b, f)] - - def addDisjunctionConstraint(self, disjuncts): - """Function to add a new Disjunction constraint - - Args: - disjuncts (list of list of Equations): Each inner list represents a disjunct - """ - self.disjunctionList.append(disjuncts) - - def lowerBoundExists(self, x): - """Function to check whether lower bound for a variable is known - - Args: - x (int): Variable to check - """ - return x in self.lowerBounds - - def upperBoundExists(self, x): - """Function to check whether upper bound for a variable is known - - Args: - x (int): Variable to check - """ - return x in self.upperBounds - - def addEquality(self, vars, coeffs, scalar, isProperty=False): - """Function to add equality constraint to network - - .. math:: - \sum_i vars_i * coeffs_i = scalar - - Args: - vars (list of int): Variable numbers - coeffs (list of float): Coefficients - scalar (float): Right hand side constant of equation - isProperty (bool): If true, this constraint can be removed later by clearProperty() method - """ - assert len(vars)==len(coeffs) - e = MarabouUtils.Equation() - for i in range(len(vars)): - e.addAddend(coeffs[i], vars[i]) - e.setScalar(scalar) - self.addEquation(e, isProperty) - - def addInequality(self, vars, coeffs, scalar, isProperty=False): - """Function to add inequality constraint to network - - .. 
math:: - \sum_i vars_i * coeffs_i \le scalar - - Args: - vars (list of int): Variable numbers - coeffs (list of float): Coefficients - scalar (float): Right hand side constant of inequality - isProperty (bool): If true, this constraint can be removed later by clearProperty() method - """ - assert len(vars)==len(coeffs) - e = MarabouUtils.Equation(MarabouCore.Equation.LE) - for i in range(len(vars)): - e.addAddend(coeffs[i], vars[i]) - e.setScalar(scalar) - self.addEquation(e, isProperty) - - def getMarabouQuery(self): - """Function to convert network into Marabou InputQuery - - Returns: - :class:`~maraboupy.MarabouCore.InputQuery` - """ - ipq = MarabouCore.InputQuery() - ipq.setNumberOfVariables(self.numVars) - - i = 0 - for inputVarArray in self.inputVars: - for inputVar in inputVarArray.flatten(): - ipq.markInputVariable(inputVar, i) - i+=1 - - i = 0 - for outputVarArray in self.outputVars: - for outputVar in outputVarArray.flatten(): - ipq.markOutputVariable(outputVar, i) - i+=1 - - for e in self.equList: - eq = MarabouCore.Equation(e.EquationType) - for (c, v) in e.addendList: - assert v < self.numVars - eq.addAddend(c, v) - eq.setScalar(e.scalar) - ipq.addEquation(eq) - - for e in self.additionalEquList: - eq = MarabouCore.Equation(e.EquationType) - for (c, v) in e.addendList: - assert v < self.numVars - eq.addAddend(c, v) - eq.setScalar(e.scalar) - ipq.addEquation(eq) - - for r in self.reluList: - assert r[1] < self.numVars and r[0] < self.numVars - MarabouCore.addReluConstraint(ipq, r[0], r[1]) - - for r in self.leakyReluList: - assert r[1] < self.numVars and r[0] < self.numVars - assert(r[2] > 0 and r[2] < 1) - MarabouCore.addLeakyReluConstraint(ipq, r[0], r[1], r[2]) - - for r in self.bilinearList: - assert r[2] < self.numVars and r[1] < self.numVars and r[0] < self.numVars - MarabouCore.addBilinearConstraint(ipq, r[0], r[1], r[2]) - - for r in self.sigmoidList: - assert r[1] < self.numVars and r[0] < self.numVars - MarabouCore.addSigmoidConstraint(ipq, r[0], r[1]) - - for m in self.maxList: - assert m[1] < self.numVars - for e in m[0]: - assert e < self.numVars - MarabouCore.addMaxConstraint(ipq, m[0], m[1]) - - for m in self.softmaxList: - for e in m[1]: - assert e < self.numVars - for e in m[0]: - assert e < self.numVars - MarabouCore.addSoftmaxConstraint(ipq, m[0], m[1]) - - for b, f in self.absList: - MarabouCore.addAbsConstraint(ipq, b, f) - - for b, f in self.signList: - MarabouCore.addSignConstraint(ipq, b, f) - - for disjunction in self.disjunctionList: - converted_disjunction = [] - for disjunct in disjunction: - converted_disjunct = [] - for e in disjunct: - eq = MarabouCore.Equation(e.EquationType) - for (c, v) in e.addendList: - assert v < self.numVars - eq.addAddend(c, v) - eq.setScalar(e.scalar) - converted_disjunct.append(eq) - converted_disjunction.append(converted_disjunct) - MarabouCore.addDisjunctionConstraint(ipq, converted_disjunction) - - for l in self.lowerBounds: - assert l < self.numVars - ipq.setLowerBound(l, self.lowerBounds[l]) - - for u in self.upperBounds: - assert u < self.numVars - ipq.setUpperBound(u, self.upperBounds[u]) - - return ipq - def solve(self, filename="", verbose=True, options=None, propertyFilename=""): """Function to solve query represented by this network @@ -359,7 +67,7 @@ def solve(self, filename="", verbose=True, options=None, propertyFilename=""): - vals (Dict[int, float]): Empty dictionary if UNSAT, otherwise a dictionary of SATisfying values for variables - stats (:class:`~maraboupy.MarabouCore.Statistics`): A Statistics 
object to how Marabou performed """ - ipq = self.getMarabouQuery() + ipq = self.getInputQuery() if propertyFilename: MarabouCore.loadProperty(ipq, propertyFilename) if options == None: @@ -393,7 +101,7 @@ def calculateBounds(self, filename="", verbose=True, options=None): - bounds (Dict[int, tuple]): Empty dictionary if UNSAT, otherwise a dictionary of bounds for output variables - stats (:class:`~maraboupy.MarabouCore.Statistics`): A Statistics object to how Marabou performed """ - ipq = self.getMarabouQuery() + ipq = self.getInputQuery() if options == None: options = MarabouCore.Options() exitCode, bounds, stats = MarabouCore.calculateBounds(ipq, options, str(filename)) @@ -497,15 +205,6 @@ def evaluateLocalRobustness(self, input, epsilon, originalClass, verbose=True, o return [vals, stats, maxClass] - def saveQuery(self, filename=""): - """Serializes the inputQuery in the given filename - - Args: - filename: (string) file to write serialized inputQuery - """ - ipq = self.getMarabouQuery() - MarabouCore.saveQuery(ipq, str(filename)) - def evaluateWithMarabou(self, inputValues, filename="evaluateWithMarabou.log", options=None): """Function to evaluate network at a given point using Marabou as solver @@ -530,7 +229,7 @@ def evaluateWithMarabou(self, inputValues, filename="evaluateWithMarabou.log", o for x in assignList: inputDict[x[0]] = x[1] - ipq = self.getMarabouQuery() + ipq = self.getInputQuery() for k in inputDict: ipq.setLowerBound(k, inputDict[k]) ipq.setUpperBound(k, inputDict[k]) @@ -583,51 +282,3 @@ def findError(self, inputValues, options=None, filename="evaluateWithMarabou.log assert len(outMar) == len(outNotMar) err = [np.abs(outMar[i] - outNotMar[i]) for i in range(len(outMar))] return err - - def isEqualTo(self, network): - """ - Add a comparison between two Marabou networks and all their attributes. - - :param network: the other Marabou network to be compared with. - :return: True if these two networks and all their attributes are identical; False if not. - """ - equivalence = True - if self.numVars != network.numVars \ - or self.reluList != network.reluList \ - or self.sigmoidList != network.sigmoidList \ - or self.maxList != network.maxList \ - or self.absList != network.absList \ - or self.signList != network.signList \ - or self.disjunctionList != network.disjunctionList \ - or self.lowerBounds != network.lowerBounds \ - or self.upperBounds != network.upperBounds: - equivalence = False - for equation1, equation2 in zip(self.equList, network.equList): - if not equation1.isEqualTo(equation2): - equivalence = False - for inputvars1, inputvars2 in zip(self.inputVars, network.inputVars): - if (inputvars1.flatten() != inputvars2.flatten()).any(): - equivalence = False - for outputVars1, outputVars2 in zip(self.outputVars, network.outputVars): - if (outputVars1.flatten() != outputVars1.flatten()).any(): - equivalence = False - return equivalence - - def addConstraint(self, constraint: VarConstraint): - """ - Support the Pythonic API to add constraints to the neurons in the Marabou network. - - :param constraint: an instance of the VarConstraint class, which comprises various neuron constraints. - :return: delegate various constraints into lower/upper bounds and equality/inequality. 
- """ - vars = list(constraint.combination.varCoeffs) - coeffs = [constraint.combination.varCoeffs[i] for i in vars] - if constraint.lowerBound is not None: - self.setLowerBound(vars[0], constraint.lowerBound) - elif constraint.upperBound is not None: - self.setUpperBound(vars[0], constraint.upperBound) - else: - if constraint.isEquality: - self.addEquality(vars, coeffs, - constraint.combination.scalar) - else: - self.addInequality(vars, coeffs, - constraint.combination.scalar) diff --git a/maraboupy/MarabouNetworkONNX.py b/maraboupy/MarabouNetworkONNX.py index 0ebf69ee91..5f4303ef58 100644 --- a/maraboupy/MarabouNetworkONNX.py +++ b/maraboupy/MarabouNetworkONNX.py @@ -14,20 +14,13 @@ MarabouNetworkONNX represents neural networks with piecewise linear constraints derived from the ONNX format ''' -import numpy as np import onnx import onnxruntime -from onnx import numpy_helper -from onnx.helper import get_attribute_value -from maraboupy import MarabouUtils -from maraboupy import MarabouNetwork -from onnx import TensorProto -import itertools +from maraboupy.MarabouNetwork import MarabouNetwork +from maraboupy.parsers.ONNXParser import ONNXParser import os -from copy import copy -from onnx.reference.ops._op_list import Split_18, Unsqueeze_1 -class MarabouNetworkONNX(MarabouNetwork.MarabouNetwork): +class MarabouNetworkONNX(MarabouNetwork): """Constructs a MarabouNetworkONNX object from an ONNX file Args: @@ -40,90 +33,60 @@ class MarabouNetworkONNX(MarabouNetwork.MarabouNetwork): """ def __init__(self, filename, inputNames=None, outputNames=None, reindexOutputVars=False): super().__init__() - self.readONNX(filename, inputNames, outputNames, reindexOutputVars=reindexOutputVars) + self.readONNX(filename, inputNames, outputNames, reindexOutputVars) - def clear(self): - """Reset values to represent empty network - """ - super().clear() - self.madeGraphEquations = [] - self.varMap = dict() - self.constantMap = dict() - self.shapeMap = dict() - self.vnnlibMap = dict() - self.inputNames = None - self.outputNames = None - self.graph = None - - def shallowClear(self): - """Reset values to represent new copy - of network while maintaining - previous constraints. Used for - unrolling system dynamics. - """ - self.madeGraphEquations = [] - self.varMap = dict() - self.constantMap = dict() - self.shapeMap = dict() - self.inputNames = None - self.outputNames = None - self.graph = None - - def readONNX(self, filename, inputNames, outputNames, reindexOutputVars=False): - """Read an ONNX file and create a MarabouNetworkONNX object - - Args: - filename: (str): Path to the ONNX file - inputNames: (list of str): List of node names corresponding to inputs - outputNames: (list of str): List of node names corresponding to outputs - reindexOutputVars: (bool): Reindex the variables so that the output variables are immediate after input variables. + def readONNX(self, filename, inputNames=None, outputNames=None, reindexOutputVars=True, preserveExistingConstraints=False): + if not preserveExistingConstraints: + self.clear() - :meta private: - """ self.filename = filename self.graph = onnx.load(filename).graph - # Get default inputs/outputs if no names are provided - if not inputNames: + # Setup input node names + if inputNames is not None: + # Check that input are in the graph + for name in inputNames: + if not len([nde for nde in self.graph.node if name in nde.input]): + raise RuntimeError("Input %s not found in graph!" 
% name) + self.inputNames = inputNames + else: + # Get default inputs if no names are provided assert len(self.graph.input) >= 1 initNames = [node.name for node in self.graph.initializer] - inputNames = [inp.name for inp in self.graph.input if inp.name not in initNames] - if not outputNames: + self.inputNames = [inp.name for inp in self.graph.input if inp.name not in initNames] + + # Setup output node names + if outputNames is not None: + if isinstance(outputNames, str): + outputNames = [outputNames] + + # Check that outputs are in the graph + for name in outputNames: + if not len([nde for nde in self.graph.node if name in nde.output]): + raise RuntimeError("Output %s not found in graph!" % name) + self.outputNames = outputNames + else: + # Get all outputs if no names are provided assert len(self.graph.output) >= 1 initNames = [node.name for node in self.graph.initializer] - outputNames = [out.name for out in self.graph.output if out.name not in initNames] - elif isinstance(outputNames, str): - outputNames = [outputNames] + self.outputNames = [out.name for out in self.graph.output if out.name not in initNames] - # Check that input/outputs are in the graph - for name in inputNames: - if not len([nde for nde in self.graph.node if name in nde.input]): - raise RuntimeError("Input %s not found in graph!" % name) - for name in outputNames: - if not len([nde for nde in self.graph.node if name in nde.output]): - raise RuntimeError("Output %s not found in graph!" % name) + ONNXParser.parse(self, self.graph, self.inputNames, self.outputNames, reindexOutputVars=reindexOutputVars) - self.inputNames = inputNames - self.outputNames = outputNames + def getNode(self, nodeName): + """Find the node in the graph corresponding to the given name - # Process the shapes and values of the graph while making Marabou equations and constraints - self.foundnInputFlags = 0 - self.processGraph() + Args: + nodeName (str): Name of node to find in graph - # If the given inputNames/outputNames specify only a portion of the network, then we will have - # shape information saved not relevant to the portion of the network. Remove extra shapes. - self.cleanShapes() + Returns: + (str): ONNX node named nodeName - if reindexOutputVars: - # Other Marabou input parsers assign output variables immediately after input variables and before any - # intermediate variables. This function reassigns variable numbering to match other parsers. - # If this is skipped, the output variables will be the last variables defined. - self.reassignOutputVariables() - else: - for outputName in self.outputNames: - if outputName in self.constantMap: - raise RuntimeError("Output variable %s is a constant, not the output of equations!" 
% outputName) - self.outputVars = [self.varMap[outputName] for outputName in self.outputNames] + :meta private: + """ + nodes = [node for node in self.graph.node if nodeName in node.output] + assert len(nodes) == 1 + return nodes[0] def splitNetworkAtNode(self, nodeName, networkNamePreSplit=None, networkNamePostSplit=None): @@ -166,1338 +129,6 @@ def splitNetworkAtNode(self, nodeName, networkNamePreSplit=None, self.outputNames = [outputName] return True - def processGraph(self): - """Processes the ONNX graph to produce Marabou equations - - :meta private: - """ - # Add shapes for the graph's inputs - for node in self.graph.input: - self.shapeMap[node.name] = list([dim.dim_value if dim.dim_value > 0 else 1 for dim in node.type.tensor_type.shape.dim]) - - # If we find one of the specified inputs, create new variables - if node.name in self.inputNames: - self.madeGraphEquations += [node.name] - self.foundnInputFlags += 1 - self.makeNewVariables(node.name) - self.inputVars += [np.array(self.varMap[node.name])] - - # Add shapes for constants - for node in self.graph.initializer: - self.shapeMap[node.name] = list(node.dims) - self.madeGraphEquations += [node.name] - - # Recursively create remaining shapes and equations as needed - for outputName in self.outputNames: - self.makeGraphEquations(outputName, True) - - def makeGraphEquations(self, nodeName, makeEquations): - """Recursively populates self.shapeMap, self.varMap, and self.constantMap while adding equations and constraints - - Args: - nodeName (str): Name of node for making the shape - makeEquations (bool): Create Marabou equations for this node if True - - :meta private: - """ - if nodeName in self.madeGraphEquations: - return - - if nodeName in self.inputNames: - self.foundnInputFlags += 1 - # If an inputName is an intermediate layer of the network, we don't need to create Marabou - # equations for its inputs. However, we still need to call makeMarabouEquations in order to - # compute shapes. We just need to set the makeEquations flag to false - makeEquations = False - self.madeGraphEquations += [nodeName] - - # Recursively call makeGraphEquations, then call makeMarabouEquations - # This ensures that shapes and values of a node's inputs have been computed first - for inNodeName in self.getInputNodes(nodeName): - self.makeGraphEquations(inNodeName, makeEquations) - - # By this point, all input variables need to have been found - if self.foundnInputFlags != len(self.inputNames): - err_msg = "These input variables could not be found: %s"%(", ".join([inVar for inVar in self.inputNames if inVar not in self.varMap])) - raise RuntimeError(err_msg) - - # Compute node's shape and create Marabou equations as needed - self.makeMarabouEquations(nodeName, makeEquations) - - # Create new variables when we find one of the inputs - if nodeName in self.inputNames: - self.makeNewVariables(nodeName) - self.inputVars += [np.array(self.varMap[nodeName])] - - def makeMarabouEquations(self, nodeName, makeEquations): - """Compute the shape and values of a node assuming the input shapes and values have been computed already. 
- - Args: - nodeName (str): Name of node for which we want to compute the output shape - makeEquations (bool): Create Marabou equations for this node if True - - :meta private: - """ - node = self.getNode(nodeName) - if node.op_type == 'Constant': - self.constant(node) - elif node.op_type == 'Identity': - self.identity(node) - elif node.op_type == 'Cast': - self.cast(node) - elif node.op_type == 'Reshape': - self.reshape(node) - elif node.op_type == 'Flatten': - self.flatten(node) - elif node.op_type == "Transpose": - self.transpose(node) - elif node.op_type == 'Unsqueeze': - self.unsqueeze(node) - elif node.op_type == 'Squeeze': - self.squeeze(node) - elif node.op_type == "BatchNormalization": - self.batchNorm(node, makeEquations) - elif node.op_type == 'Concat': - self.concatEquations(node) - elif node.op_type == "MaxPool": - self.maxpoolEquations(node, makeEquations) - elif node.op_type == "Conv": - self.convEquations(node, makeEquations) - elif node.op_type == 'Gemm': - self.gemmEquations(node, makeEquations) - elif node.op_type == 'MatMul': - self.matMulEquations(node, makeEquations) - elif node.op_type == 'Mul': - self.mulEquations(node, makeEquations) - elif node.op_type == 'Add': - self.addEquations(node, makeEquations) - elif node.op_type == 'Relu': - self.reluEquations(node, makeEquations) - elif node.op_type == 'Sigmoid': - self.sigmoidEquations(node, makeEquations) - elif node.op_type == 'Split': - self.splitEquations(node, nodeName, makeEquations) - elif node.op_type == 'Resize': - self.resizeEquations(node, makeEquations) - elif node.op_type == 'Tanh': - self.tanhEquations(node, makeEquations) - elif node.op_type == 'LeakyRelu': - self.leakyReluEquations(node, makeEquations) - elif node.op_type == 'Softmax': - self.softmaxEquations(node, makeEquations) - elif node.op_type == 'Sub': - self.subEquations(node, makeEquations) - else: - raise NotImplementedError("Operation {} not implemented".format(node.op_type)) - - def getNode(self, nodeName): - """Find the node in the graph corresponding to the given name - - Args: - nodeName (str): Name of node to find in graph - - Returns: - (str): ONNX node named nodeName - - :meta private: - """ - nodes = [node for node in self.graph.node if nodeName in node.output] - assert len(nodes) == 1 - return nodes[0] - - def makeNewVariables(self, nodeName): - """Assuming the node's shape is known, return a set of new variables in the same shape - - Args: - nodeName (str): Name of node - - Returns: - (numpy array): Array of variable numbers - - :meta private: - """ - assert nodeName not in self.varMap - shape = self.shapeMap[nodeName] - size = np.prod(shape) - v = np.array([self.getNewVariable() for _ in range(size)]).reshape(shape) - self.varMap[nodeName] = v - assert all([np.equal(np.mod(i, 1), 0) for i in v.reshape(-1)]) # check if integers - return v - - def getInputNodes(self, nodeName): - """Get names of nodes that are inputs to the given node - - Args: - nodeName (str): Name of node - - Returns: - (list of str): Names of nodes that are inputs to the given node - - :meta private: - """ - node = self.getNode(nodeName) - inNodes = [] - for inp in node.input: - if len([nde for nde in self.graph.node if inp in nde.output]): - inNodes += [inp] - elif len([nde for nde in self.graph.initializer if nde.name == inp]): - self.constantMap[inp] = [numpy_helper.to_array(init) for init in self.graph.initializer if init.name == inp][0] - return inNodes - - def constant(self, node): - """Function representing a constant tensor - - Args: - node (node): ONNX 
node representing constant operation - - :meta private: - """ - nodeName = node.output[0] - for attr in node.attribute: - if attr.name == "value": - self.constantMap[nodeName] = numpy_helper.to_array(get_attribute_value(attr)) - return - raise RuntimeError("Could not find value of tensor constant") - - def identity(self, node): - """Function representing identity - - Args: - node (node): ONNX node representing identity operation - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - if inputName in self.varMap: - self.varMap[nodeName] = self.varMap[inputName] - elif inputName in self.constantMap: - self.constantMap[nodeName] = self.constantMap[inputName] - - def cast(self, node): - """Function representing cast - - Args: - node (node): ONNX node representing cast operation - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - - # Try to find type to cast to. If not found, raise error - to = None - for attr in node.attribute: - if attr.name == "to": - to = get_attribute_value(attr) - if to is None: - raise RuntimeError("Casting type not specified with attribute 'to'") - - # Cast input array to correct type, and throw error if type is unknown - if inputName in self.constantMap: - if to == TensorProto.FLOAT16: - self.constantMap[nodeName] = self.constantMap[inputName].astype('float16') - elif to == TensorProto.FLOAT: - self.constantMap[nodeName] = self.constantMap[inputName].astype('float32') - elif to == TensorProto.DOUBLE: - self.constantMap[nodeName] = self.constantMap[inputName].astype('double') - elif to == TensorProto.UINT8: - self.constantMap[nodeName] = self.constantMap[inputName].astype('uint8') - elif to == TensorProto.UINT16: - self.constantMap[nodeName] = self.constantMap[inputName].astype('uint16') - elif to == TensorProto.UINT32: - self.constantMap[nodeName] = self.constantMap[inputName].astype('uint32') - elif to == TensorProto.UINT64: - self.constantMap[nodeName] = self.constantMap[inputName].astype('uint64') - elif to == TensorProto.INT8: - self.constantMap[nodeName] = self.constantMap[inputName].astype('int8') - elif to == TensorProto.INT16: - self.constantMap[nodeName] = self.constantMap[inputName].astype('int16') - elif to == TensorProto.INT32: - self.constantMap[nodeName] = self.constantMap[inputName].astype('int32') - elif to == TensorProto.INT64: - self.constantMap[nodeName] = self.constantMap[inputName].astype('int64') - else: - err_msg = "Unknown type for casting: %d\n" % to - err_msg += "Check here for ONNX TensorProto: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto" - raise NotImplementedError(err_msg) - - # We shouldn't be casting variables to different types, since Marabou assumes variables have double precision - elif inputName in self.varMap: - raise NotImplementedError("Casting variables not allowed with Marabou") - - def reshape(self, node): - """Function representing reshape - - Args: - node (node): ONNX node representing reshape operation - - :meta private: - """ - nodeName = node.output[0] - inputName1, inputName2 = node.input - - # Assume first input is array to be reshaped, second input is the new shape array - reshapeVals = self.constantMap[inputName2] - self.shapeMap[nodeName] = list(np.zeros(self.shapeMap[inputName1]).reshape(reshapeVals).shape) - if inputName1 in self.varMap: - self.varMap[nodeName] = copy(self.varMap[inputName1]).reshape(reshapeVals) - elif inputName1 in 
self.constantMap: - self.constantMap[nodeName] = self.constantMap[inputName1].reshape(reshapeVals) - - def flatten(self, node): - """Function representing flatten - - Unlike numpy.flatten(), ONNX's Flatten operation reshapes - a (d_0, d_1, ..., d_n) tensor into a 2D tensor with shape - (d_0 * d_1 * ... * d_(axis-1), d_axis * d_(axis+1) * ... * d_n). - - Args: - node (node): ONNX node representing flatten operation - - :meta private: - """ - nodeName = node.output[0] - - # Assume first input is array to be flattened - inputName = node.input[0] - axis = 1 - for attr in node.attribute: - if attr.name == "axis": - axis = get_attribute_value(attr) - - dimension1 = int(np.prod(self.shapeMap[inputName][:axis])) - dimension2 = int(np.prod(self.shapeMap[inputName][axis:])) - newShape = [dimension1, dimension2] - self.shapeMap[nodeName] = newShape - - if inputName in self.varMap: - self.varMap[nodeName] = self.varMap[inputName].reshape(newShape) - elif inputName in self.constantMap: - self.constantMap[nodeName] = self.constantMap[inputName].reshape(newShape) - - def transpose(self, node): - """Function representing transpose - - Args: - node (node): ONNX node representing transpose operation - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - - # Get attributes - perm = None - for attr in node.attribute: - if attr.name == "perm": - perm = get_attribute_value(attr) - if perm is None: - raise RuntimeError("Permutation indices not specified by attibute 'perm'") - self.shapeMap[nodeName] = [self.shapeMap[inputName][p] for p in perm] - if inputName in self.varMap: - self.varMap[nodeName] = \ - np.transpose(self.varMap[node.input[0]].reshape(self.shapeMap[node.input[0]]), - perm) - elif inputName in self.constantMap: - self.constantMap[nodeName] = np.transpose(self.constantMap[inputName], perm) - - def unsqueeze(self, node): - """Function representing unsqueeze - - Args: - node (node): ONNX node representing unsqueeze operation - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - axes = self.constantMap[node.input[1]] - - if inputName in self.varMap: - output_data = Unsqueeze_1.eval(self.varMap[inputName], axes=axes) - self.shapeMap[nodeName] = output_data.shape - self.varMap[nodeName] = output_data - else: - output_data = Unsqueeze_1.eval(self.constantMap[inputName], axes=axes) - self.shapeMap[nodeName] = output_data.shape - self.constantMap[nodeName] = output_data - - - def squeeze(self, node): - """Function representing squeeze - - Args: - node (node): ONNX node representing squeeze operation - - :meta private: - """ - nodeName = node.output[0] - inputName, axisName = node.input - - axis = self.constantMap[axisName] - - axis_ = copy(axis) - if inputName in self.varMap: - vars = copy(self.varMap[inputName]) - for i in range(len(axis)): - vars = np.squeeze(vars, axis_[i]) - for j in range(len(axis))[i+1:]: - axis_[j] -= 1 - self.varMap[nodeName] = vars - self.shapeMap[nodeName] = vars.shape - return - - def batchNorm(self, node, makeEquations): - """Function to generate equations for a BatchNormalization - - Args: - node (node): ONNX node representing the BatchNormalization operation - - :meta private - """ - - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - - # Get attributes - epsilon = None - for attr in node.attribute: - if attr.name == "epsilon": - epsilon = get_attribute_value(attr) - - # Get inputs - scales = self.constantMap[node.input[1]].reshape(-1) - biases = 
self.constantMap[node.input[2]].reshape(-1) - input_means = self.constantMap[node.input[3]].reshape(-1) - input_variances = self.constantMap[node.input[4]].reshape(-1) - - if not makeEquations: - return - - numChannels = len(scales) - - # Get variables - inputVars = self.varMap[inputName].reshape(numChannels, -1) - outputVars = self.makeNewVariables(nodeName).reshape(numChannels, -1) - assert(inputVars.shape == outputVars.shape) - - numInputs = inputVars.shape[1] - - for i in range(numChannels): - for j in range(numInputs): - # Add equation - # To know this computation, - # refer to https://github.com/onnx/onnx/blob/master/docs/Operators.md#batchnormalization. - e = MarabouUtils.Equation() - e.addAddend(1 / np.sqrt(input_variances[i] + epsilon) * scales[i], inputVars[i][j]) - e.addAddend(-1, outputVars[i][j]) - e.setScalar(input_means[i] / np.sqrt(input_variances[i] + epsilon) * scales[i] - biases[i]) - self.addEquation(e) - - def maxpoolEquations(self, node, makeEquations): - """Function to generate maxpooling equations - - Args: - node (node): ONNX node representing maxpool operation - makeEquations (bool): True if we need to create new variables and maxpool constraints - - :meta private: - """ - nodeName = node.output[0] - - # Extract attributes and define shape - inputShape = self.shapeMap[node.input[0]] - kernel_shape = [1, 1] - strides = [1, 1] - for attr in node.attribute: - if attr.name == 'kernel_shape': - kernel_shape = get_attribute_value(attr) - elif attr.name == 'strides': - strides = get_attribute_value(attr) - - outputShape = [dim for dim in inputShape] - outputShape[2] = int(np.ceil((inputShape[2] - ((kernel_shape[0] - 1) + 1) + 1) / strides[0])) - outputShape[3] = int(np.ceil((inputShape[3] - ((kernel_shape[1] - 1) + 1) + 1) / strides[1])) - self.shapeMap[nodeName] = outputShape - - if not makeEquations: - return - - inVars = self.varMap[node.input[0]] - outVars = self.makeNewVariables(nodeName) - for i in range(outputShape[2]): - for j in range(outputShape[3]): - for k in range(outputShape[1]): - maxVars = set() - for di in range(strides[0]*i, strides[0]*i + kernel_shape[0]): - for dj in range(strides[1]*j, strides[1]*j + kernel_shape[1]): - if di < inputShape[2] and dj < inputShape[3]: - maxVars.add(inVars[0][k][di][dj]) - self.addMaxConstraint(maxVars, outVars[0][k][i][j]) - - def softmaxEquations(self, node, makeEquations): - """Function to generate constraints for softmax - - Args: - node (node): ONNX node representing the softmax operation - makeEquations (bool): True if we need to create new variables and maxpool constraints - - :meta private: - """ - nodeName = node.output[0] - - # Extract attributes and define shape - inputShape = self.shapeMap[node.input[0]] - for attr in node.attribute: - if attr.name == 'axis': - axis = get_attribute_value(attr) - - self.shapeMap[nodeName] = inputShape - - if not makeEquations: - return - - inVars = self.varMap[node.input[0]] - outVars = self.makeNewVariables(nodeName) - - if len(inputShape) == 2 and inputShape[0] == 1: - self.addSoftmaxConstraint(list(np.array(inVars).flatten()), list(np.array(outVars).flatten())) - else: - axis = ( len(inputShape) + axis ) % len(inputShape) - perm = [] - for i, s in enumerate(inputShape): - if i == axis: - continue - perm.append(i) - perm.append(axis) - - inVarsReshaped = np.transpose(inVars, perm).reshape(-1, inputShape[axis]) - outVarsReshaped = np.transpose(outVars, perm).reshape(-1, inputShape[axis]) - for i in range(inVarsReshaped.shape[0]): - self.addSoftmaxConstraint(inVarsReshaped[i], 
outVarsReshaped[i]) - - def convEquations(self, node, makeEquations): - """Function to generate equations for a 2D convolution - - Args: - node (node): ONNX node representing the 2D Convolution operation - makeEquations (bool): True if we need to create new variables and write Marabou equations - - :meta private: - """ - nodeName = node.output[0] - - # Extract information about convolution - strides = [1, 1] - pads = [0, 0, 0, 0] - for attr in node.attribute: - if attr.name == 'strides': - strides = get_attribute_value(attr) - elif attr.name == 'pads': - pads = get_attribute_value(attr) - pad_left, pad_bottom, pad_right, pad_top = pads - - # Get input shape information - # First input should be variable tensor, the second a weight matrix defining filters - shape0 = self.shapeMap[node.input[0]] - shape1 = self.shapeMap[node.input[1]] - input_channels = shape0[1] - input_width = shape0[2] - input_height = shape0[3] - num_filters = shape1[0] - filter_channels = shape1[1] - filter_width = shape1[2] - filter_height = shape1[3] - - # The third input is optional and specifies a bias for each filter - # Bias is 0 if third input is not given - biases = np.zeros(num_filters) - if len(node.input) == 3: - biases = self.constantMap[node.input[2]] - - # The number of channels should match between input variable and filters - assert input_channels == filter_channels - - # Compute output shape - out_width = (input_width - filter_width + pad_left + pad_right) // strides[0] + 1 - out_height = (input_height - filter_height + pad_bottom + pad_top) // strides[1] + 1 - out_channels = num_filters - self.shapeMap[nodeName] = [shape0[0], out_channels, out_width, out_height] - - if not makeEquations: - return - - inVars = self.varMap[node.input[0]] - weights = self.constantMap[node.input[1]] - outVars = self.makeNewVariables(nodeName) - - ### Generate actual equations ### - # There is one equation for every output variable - for i in range(out_width): - for j in range(out_height): - for k in range(out_channels): # Out_channel corresponds to filter number - e = MarabouUtils.Equation() - - # The equation convolves the filter with the specified input region - # Iterate over the filter - for di in range(filter_width): - for dj in range(filter_height): - for dk in range(filter_channels): - w_ind = int(strides[0]*i+di - pad_left) - h_ind = int(strides[1]*j+dj - pad_bottom) - if h_ind < input_height and h_ind >= 0 and w_ind < input_width and w_ind >= 0: - var = inVars[0][dk][w_ind][h_ind] - c = weights[k][dk][di][dj] - e.addAddend(c, var) - - # Add output variable - e.addAddend(-1, outVars[0][k][i][j]) - e.setScalar(-biases[k]) - self.addEquation(e) - - def gemmEquations(self, node, makeEquations): - """Function to generate equations corresponding to Gemm (general matrix multiplication) - - Args: - node (node): ONNX node representing the Gemm operation - makeEquations (bool): True if we need to create new variables and write Marabou equations - - :meta private: - """ - nodeName = node.output[0] - - # Get inputs - if len(node.input) == 3: - inputName1, inputName2, inputName3 = node.input - else: - inputName1, inputName2 = node.input - inputName3 = None - shape1 = self.shapeMap[inputName1] - shape2 = self.shapeMap[inputName2] - - # Transpose first two inputs if needed, - # and save scaling parameters alpha and beta if set - alpha = 1.0 - beta = 1.0 - transA = 0 - transB = 0 - for attr in node.attribute: - if attr.name == 'transA': - transA = get_attribute_value(attr) - elif attr.name == 'transB': - transB = 
get_attribute_value(attr) - elif attr.name == 'alpha': - alpha = get_attribute_value(attr) - elif attr.name == 'beta': - beta = get_attribute_value(attr) - - if transA: - shape1 = shape1[::-1] - if transB: - shape2 = shape2[::-1] - outShape = [shape1[0], shape2[1]] - self.shapeMap[nodeName] = outShape - if not makeEquations: - return - - # Assume that first input is variables, second is Matrix for MatMul, and third is bias addition - input1 = self.varMap[inputName1] - input2 = self.constantMap[inputName2] - if inputName3: - input3 = self.constantMap[inputName3] - - # Transpose inputs - if transA: - input1 = np.transpose(input1) - if transB: - input2 = np.transpose(input2) - if inputName3: - input3 = np.broadcast_to(input3, outShape) - - assert shape1[-1] == shape2[0] - assert shape1[0] == outShape[0] - assert shape2[1] == outShape[1] - - # Create new variables - outputVariables = self.makeNewVariables(nodeName) - # Generate equations - for i in range(shape1[0]): - for j in range(shape2[1]): - e = MarabouUtils.Equation() - for k in range(shape1[1]): - e.addAddend(input2[k][j]*alpha, input1[i][k]) - - # Put output variable as the last addend last - e.addAddend(-1, outputVariables[i][j]) - if inputName3: - e.setScalar(-input3[i][j]*beta) - else: - e.setScalar(0) - self.addEquation(e) - - - def matMulEquations(self, node, makeEquations): - """Function to generate equations corresponding to matrix multiplication - - Args: - node (node): ONNX node representing the MatMul operation - makeEquations (bool): True if we need to create new variables and write Marabou equations - - :meta private: - """ - nodeName = node.output[0] - - # Get inputs and determine which inputs are constants and which are variables - inputName1, inputName2 = node.input - shape1 = self.shapeMap[inputName1] - shape2 = self.shapeMap[inputName2] - if len(shape1) > 2 and shape1[0] == 1: - shape1 = shape1[1:] - elif len(shape1) == 1: - # Broadcast first input to make sure the first input is a matrix - shape1 = [1] + shape1 - - if len(shape2) > 2 and shape2[0] == 1: - shape2 = shape2[1:] - a = np.zeros(shape1) - b = np.zeros(shape2) - c = np.matmul(a, b) - outshape = c.shape - self.shapeMap[nodeName] = outshape - if not makeEquations: - return - - firstInputConstant = False; secondInputConstant = False - if inputName1 in self.constantMap: - input1 = self.constantMap[inputName1] - firstInputConstant = True - else: - input1 = self.varMap[inputName1] - - if inputName2 in self.constantMap: - input2 = self.constantMap[inputName2] - secondInputConstant = True - else: - input2 = self.varMap[inputName2] - - input1 = input1.reshape(shape1) - input2 = input2.reshape(shape2) - - # If both inputs are constant, than the output is constant as well, and we don't need new variables or equations - if firstInputConstant and secondInputConstant: - self.constantMap[nodeName] = np.matmul(input1,input2) - return - - # Create new variables - outputVariables = self.makeNewVariables(nodeName) - - if not firstInputConstant and not secondInputConstant: - # bi-linear constraints - self.addBilinearConstraints(shape1, shape2, input1, input2, outputVariables) - else: - # Generate equations - for i in range(shape1[0]): - # Differentiate between matrix-vector multiplication and matrix-matrix multiplication - if len(shape2)>1: - for j in range(shape2[1]): - e = MarabouUtils.Equation() - for k in range(shape1[1]): - if firstInputConstant: - e.addAddend(input1[i][k], input2[k][j]) - else: - e.addAddend(input2[k][j], input1[i][k]) - - # Put output variable as the 
last addend last - e.addAddend(-1, outputVariables[i][j]) - e.setScalar(0.0) - self.addEquation(e) - else: - e = MarabouUtils.Equation() - for k in range(shape1[1]): - if firstInputConstant: - e.addAddend(input1[i][k], input2[k]) - else: - e.addAddend(input2[k], input1[i][k]) - - # Put output variable as the last addend last - e.addAddend(-1, outputVariables[i]) - e.setScalar(0.0) - self.addEquation(e) - - def addBilinearConstraints(self, shape1, shape2, input1, input2, outputVariables): - # Generate equations - if len(shape2) == 2: - for i in range(shape1[0]): - for j in range(shape2[1]): - e = MarabouUtils.Equation() - for k in range(shape1[1]): - v = self.getNewVariable() - self.addBilinear(input1[i][k], input2[k][j], v) - e.addAddend(1, v) - - # Put output variable as the last addend last - e.addAddend(-1, outputVariables[i][j]) - e.setScalar(0.0) - self.addEquation(e) - elif len(shape2) == 3: - assert(shape1[0] == shape2[0]) - for l in range(shape1[0]): - for i in range(shape1[1]): - for j in range(shape2[2]): - e = MarabouUtils.Equation() - for k in range(shape1[2]): - v = self.getNewVariable() - self.addBilinear(input1[l][i][k], input2[l][k][j], v) - e.addAddend(1, v) - - # Put output variable as the last addend last - e.addAddend(-1, outputVariables[l][i][j]) - e.setScalar(0.0) - self.addEquation(e) - else: - raise RuntimeError(f"Unsupported shape in matMul tensor: {shape2}") - - def concatEquations(self, node): - """Function to generate equations corresponding to concat - - Args: - node (node): ONNX node representing the Concat operation - - :meta private: - """ - nodeName = node.output[0] - - # Get attributes - axis = None - for attr in node.attribute: - if attr.name == "axis": - axis = get_attribute_value(attr) - - # Set maps of shape and var - inputVars = list([self.varMap[input] for input in node.input]) - outputVars = np.concatenate(inputVars, axis) - self.shapeMap[nodeName] = outputVars.shape - self.varMap[nodeName] = outputVars - - def splitEquations(self, node, nodeName, makeEquations): - """Function to generate equations corresponding to split - - Args: - node (node): ONNX node representing the Split operation - nodeName (str): Name of target node - makeEquations (bool): True if we need to create new variables and write Marabou equations - - :meta private: - """ - # Get attributes - axis = None - split = None - for attr in node.attribute: - if attr.name == "axis": - axis = get_attribute_value(attr) - if attr.name == "split": - split = get_attribute_value(attr) - - inputName = node.input[0] - inputVars = Split_18.eval(self.varMap[inputName], split=split, axis=axis) - - assert len(inputVars) == len(node.output) - - # Set a shape of target output - for i in range(len(node.output)): - if node.output[i] == nodeName: - self.shapeMap[node.output[i]] = inputVars[i].shape - break - - if not makeEquations: - return - - # Get variables and add quations - for i in range(len(node.output)): - if node.output[i] == nodeName: - reshapedInputVars = inputVars[i].reshape(-1) - outputVars = self.makeNewVariables(node.output[i]).reshape(-1) - for j in range(len(reshapedInputVars)): - # Add equation - e = MarabouUtils.Equation() - e.addAddend(1, reshapedInputVars[j]) - e.addAddend(-1, outputVars[j]) - e.setScalar(0) - self.addEquation(e) - break - - def resizeEquations(self, node, makeEquations): - """Function to generate equations corresponding to resize - - Args: - node (node): ONNX node representing the Resize operation - makeEquations (bool): True if we need to create new variables and 
write Marabou equations - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - - # Check number of dimension of input - inputVars = self.varMap[inputName] - inputShape = inputVars.shape - if inputVars.ndim != 4: - raise NotImplementedError("Marabou only supports resize operator for very specific upsample case used in YOLO now.") - - # Get and check attributes - coordinate_transformation_mode = None - cubic_coeff_a = None - mode = None - nearest_mode = None - - for attr in node.attribute: - value = get_attribute_value(attr) - if attr.name == "coordinate_transformation_mode" and value.decode() == "asymmetric": - coordinate_transformation_mode = value - elif attr.name == "cubic_coeff_a" and value == -0.75: - cubic_coeff_a = value - elif attr.name == "mode" and value.decode() == "nearest": - mode = value - elif attr.name == "nearest_mode" and value.decode() == "floor": - nearest_mode = value - else: - # Marabou supports Resize only very specific case below. - # coordinate_transformation_mode: asymmetric - # cubic_coeff_a: -0.75 - # mode: nearest - # nearest_mode: floor - # There are many cases other than the above case according to https://github.com/onnx/onnx/blob/main/docs/Operators.md#resize - # Please note that we should carefully expand this operation beyond this case. - raise NotImplementedError("Marabou only supports resize operator for very specific upsample case used in YOLO now.") - - # Get scales - scales = None - if len(node.input) == 3 and np.all(self.constantMap[node.input[2]] == [1., 1., 2., 2.]): - scales = [1, 1, 2, 2] - else: - raise NotImplementedError("Marabou only supports resize operator for very specific upsample case used in YOLO now.") - - # Set output shape - outputShape = (inputShape[0], inputShape[1], inputShape[2] * scales[2], inputShape[3] * scales[3]) - self.shapeMap[nodeName] = outputShape - - if not makeEquations: - return - - # Get variables - outputVars = self.makeNewVariables(nodeName) - - assert scales[2] * scales[3] * inputVars.size == outputVars.size - - for i in range(outputShape[1]): - for j in range(outputShape[2]): - for k in range(outputShape[3]): - # Add equation - e = MarabouUtils.Equation() - e.addAddend(1, inputVars[0][i][int(j / 2)][int(k / 2)]) - e.addAddend(-1, outputVars[0][i][j][k]) - e.setScalar(0) - self.addEquation(e) - - def mulEquations(self, node, makeEquations): - nodeName = node.output[0] - - # Get the inputs - inputName1, inputName2 = node.input - shape1 = self.shapeMap[inputName1] - # shape2 = self.shapeMap[inputName2] # comment out since this is never used. 
- - - # Get the broadcasted shape - outShape = shape1 - self.shapeMap[nodeName] = outShape - if not makeEquations: - return - - multiple = self.constantMap[inputName2] - input1 = self.varMap[inputName1] - outputVariables = self.makeNewVariables(nodeName) - input1 = input1.reshape(-1) - outputVariables = outputVariables.reshape(-1) - - for i in range(len(input1)): - e = MarabouUtils.Equation() - e.addAddend(multiple, input1[i]) - e.addAddend(-1, outputVariables[i]) - e.setScalar(0.0) - self.addEquation(e) - return - - def addEquations(self, node, makeEquations): - """Function to generate equations corresponding to addition - - Args: - node (node): ONNX node representing the Add operation - makeEquations (bool): True if we need to create new variables and write Marabou equations - - :meta private: - """ - nodeName = node.output[0] - - # Get the inputs - inputName1, inputName2 = node.input - shape1 = self.shapeMap[inputName1] - shape2 = self.shapeMap[inputName2] - - # Get the broadcasted shape - outShape = getBroadcastShape(shape1, shape2) - self.shapeMap[nodeName] = outShape - if not makeEquations: - return - - # Decide which inputs are variables and which are constants - firstInputConstant = False; secondInputConstant = False - if inputName1 in self.constantMap: - firstInputConstant = True - input1 = self.constantMap[inputName1] - else: - input1 = self.varMap[inputName1] - - if inputName2 in self.constantMap: - secondInputConstant = True - input2 = self.constantMap[inputName2] - else: - input2 = self.varMap[inputName2] - - # Broadcast inputs to ensure the shapes match - input1 = np.broadcast_to(input1, outShape) - input2 = np.broadcast_to(input2, outShape) - - # The shape after broadcasting must match - assert input1.shape == input2.shape - - # If both inputs to add are constant, then the output is constant too - # No new variables are needed, we just need to store the output in constantMap - if firstInputConstant and secondInputConstant: - self.constantMap[nodeName] = input1 + input2 - return - - # If both inputs are variables, then we need a new variable to represent - # the sum of the two variables - elif not firstInputConstant and not secondInputConstant: - outputVariables = self.makeNewVariables(nodeName) - input1 = input1.reshape(-1) - input2 = input2.reshape(-1) - outputVariables = outputVariables.reshape(-1) - for i in range(len(input1)): - e = MarabouUtils.Equation() - e.addAddend(1, input1[i]) - e.addAddend(1, input2[i]) - e.addAddend(-1, outputVariables[i]) - e.setScalar(0.0) - self.addEquation(e) - return - - # Otherwise, we are adding constants to variables. - # We don't need new equations or new variables if the input variable is the output of a linear equation. - # Instead, we can just edit the scalar term of the existing linear equation. - # However, if the input variables are not outputs of linear equations (input variables or outputs of - # activation functions) then we will need new equations. 
- if firstInputConstant: - constInput = input1 - varInput = input2 - else: - constInput = input2 - varInput = input1 - constInput = constInput.reshape(-1) - varInput = varInput.reshape(-1) - - # Adjust equations to incorporate the constant addition - numEquationsChanged = 0 - for equ in self.equList: - (c,var) = equ.addendList[-1] - assert c == -1 - if var in varInput: - ind = np.where(var == varInput)[0][0] - - # Adjust the equation - equ.setScalar(equ.scalar-constInput[ind]) - numEquationsChanged += 1 - - # If we changed one equation for every input variable, then - # we don't need any new equations - if numEquationsChanged == len(varInput): - self.varMap[nodeName] = copy(varInput).reshape(outShape) - else: - # Otherwise, assert no equations were changed, and we need to create new equations - assert numEquationsChanged == 0 - outputVariables = self.makeNewVariables(nodeName).reshape(-1) - for i in range(len(outputVariables)): - e = MarabouUtils.Equation() - e.addAddend(1, varInput[i]) - e.addAddend(-1, outputVariables[i]) - e.setScalar(-constInput[i]) - self.addEquation(e) - - def reluEquations(self, node, makeEquations): - """Function to generate equations corresponding to pointwise Relu - - Args: - node (node): ONNX node representing the Relu operation - makeEquations (bool): True if we need to create new variables and add new Relus - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - if not makeEquations: - return - - # Get variables - inputVars = self.varMap[inputName].reshape(-1) - outputVars = self.makeNewVariables(nodeName).reshape(-1) - assert len(inputVars) == len(outputVars) - - # Generate equations - for i in range(len(inputVars)): - self.addRelu(inputVars[i], outputVars[i]) - for f in outputVars: - self.setLowerBound(f, 0.0) - - def leakyReluEquations(self, node, makeEquations): - """Function to generate equations corresponding to pointwise LeakyRelu - Args: - node (node): ONNX node representing the LeakyRelu operation - makeEquations (bool): True if we need to create new variables and add new LeakyRelus - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - if not makeEquations: - return - - alpha = 0.01 - for attr in node.attribute: - if attr.name == 'alpha': - alpha = get_attribute_value(attr) - - # Get variables - inputVars = self.varMap[inputName].reshape(-1) - outputVars = self.makeNewVariables(nodeName).reshape(-1) - assert len(inputVars) == len(outputVars) - - # Generate equations - for i in range(len(inputVars)): - self.addLeakyRelu(inputVars[i], outputVars[i], alpha) - - def subEquations(self, node, makeEquations): - """Function to generate equations corresponding to subtraction - - Args: - node (node): ONNX node representing the Sub operation - makeEquations (bool): True if we need to create new variables and add new Relus - - :meta private: - """ - nodeName = node.output[0] - inputName1, inputName2 = node.input[0], node.input[1] - assert inputName1 in self.shapeMap and inputName2 in self.shapeMap - assert self.shapeMap[inputName1] == self.shapeMap[inputName2] - self.shapeMap[nodeName] = self.shapeMap[inputName1] - - if not makeEquations: - return - - assert inputName1 in self.varMap and inputName2 in self.constantMap - - # Get variables - inputVars = self.varMap[inputName1].reshape(-1) - outputVars = self.makeNewVariables(nodeName).reshape(-1) - constants = self.constantMap[inputName2].reshape(-1) - 
assert len(inputVars) == len(outputVars) == len(constants) - - # Generate equations - for i in range(len(inputVars)): - e = MarabouUtils.Equation() - e.addAddend(1, inputVars[i]) - e.addAddend(-1, outputVars[i]) - e.setScalar(-constants[i]) - self.addEquation(e) - - def sigmoidEquations(self, node, makeEquations): - """Function to generate equations corresponding to Sigmoid - - Args: - node (node): ONNX node representing the Sigmoid operation - makeEquations (bool): True if we need to create new variables and add new Sigmoids - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - if not makeEquations: - return - - # Get variables - inputVars = self.varMap[inputName].reshape(-1) - outputVars = self.makeNewVariables(nodeName).reshape(-1) - assert len(inputVars) == len(outputVars) - - # Generate equations - for i in range(len(inputVars)): - self.addSigmoid(inputVars[i], outputVars[i]) - for f in outputVars: - self.setLowerBound(f, 0.0) - self.setUpperBound(f, 1.0) - - def tanhEquations(self, node, makeEquations): - """Function to generate equations corresponding to Tanh - - Args: - node (node): ONNX node representing the Tanh operation - makeEquations (bool): True if we need to create new variables and add new Tanhs - - :meta private: - """ - nodeName = node.output[0] - inputName = node.input[0] - self.shapeMap[nodeName] = self.shapeMap[inputName] - if not makeEquations: - return - - # Get variables - inputVars = self.varMap[inputName].reshape(-1) - outputVars = self.makeNewVariables(nodeName).reshape(-1) - assert len(inputVars) == len(outputVars) - firstAffine = np.array([self.getNewVariable() for i in range(outputVars.size)]) - sigmoidOutput = np.array([self.getNewVariable() for i in range(outputVars.size)]) - - # Generate equations - for i in range(len(inputVars)): # tanh(x) = 2 * \sigmoid(2x) - 1 - self.addEquality([inputVars[i], firstAffine[i]], [2.0, -1.0], 0.0, isProperty=False) - self.addSigmoid(firstAffine[i], sigmoidOutput[i]) - self.addEquality([sigmoidOutput[i], outputVars[i]], [2.0, -1.0], 1.0, isProperty=False) - for f in outputVars: - self.setLowerBound(f, -1.0) - self.setUpperBound(f, 1.0) - - def cleanShapes(self): - """Remove unused shapes - - After constructing equations, remove shapes from self.shapeMap that are part of the graph but not - relevant for this input query. This is only cosmetic and does not impact Marabou. - - :meta private: - """ - for nodeName in [name for name in self.shapeMap]: - if nodeName not in self.varMap and nodeName not in self.constantMap: - self.shapeMap.pop(nodeName) - - def reassignVariable(self, var, numInVars, outVars, newOutVars): - """Reassign output variable so that output variables follow input variables - - This function computes what the given variable should be when the output variables are - moved to come after the input variables. 
- - Args: - var (int): Original variable number - numInVars (int): Number of input variables - outVars (numpy array of int): Original output variables - newOutVars (numpy array of int): New output variables - - Returns: - (int): New variable assignment - - :meta private: - """ - if var < numInVars: - return var - if var in outVars: - ind = np.where(var == outVars)[0][0] - return newOutVars[ind] - return var + len([outVar for outVar in outVars if outVar > var]) - - def reassignOutputVariables(self): - """Reassign output variables so output variable numbers follow input variable numbers - - Other input parsers assign output variables after input variables and before any intermediate variables. - This function reassigns the numbers for the output variables and shifts all other variables up to make space. - - :meta private: - """ - for outputName in self.outputNames: - if outputName in self.constantMap: - raise RuntimeError("Output variable %s is a constant, not the output of equations!" % outputName) - outVars = np.concatenate([self.varMap[outputName].reshape(-1) for outputName in self.outputNames]) - numInVars = np.sum([np.prod(self.shapeMap[inputName]) for inputName in self.inputNames]) - numOutVars = len(outVars) - newOutVars = np.array(range(numInVars, numInVars+numOutVars)) - - # Adjust equation variables - for eq in self.equList: - for i, (c,var) in enumerate(eq.addendList): - eq.addendList[i] = (c, self.reassignVariable(var, numInVars, outVars, newOutVars)) - - # Adjust equation variables - for eq in self.additionalEquList: - for i, (c,var) in enumerate(eq.addendList): - eq.addendList[i] = (c, self.reassignVariable(var, numInVars, outVars, newOutVars)) - - # Adjust relu list - for i, variables in enumerate(self.reluList): - self.reluList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables]) - - # Adjust relu list - for i, variables in enumerate(self.leakyReluList): - self.leakyReluList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables]) - - # Adjust sigmoid list - for i, variables in enumerate(self.sigmoidList): - self.sigmoidList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables]) - - # Adjust bilinear list - for i, variables in enumerate(self.bilinearList): - self.bilinearList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables]) - - # Adjust softmax list - for i, (inputs, outputs) in enumerate(self.softmaxList): - newInputs = [] - for var in inputs: - newInputs.append(self.reassignVariable(var, numInVars, outVars, newOutVars)) - newOutputs = [] - for var in outputs: - newOutputs.append(self.reassignVariable(var, numInVars, outVars, newOutVars)) - - self.softmaxList[i] = (newInputs, newOutputs) - - # Adjust max pool list - for i, (elements, outVar) in enumerate(self.maxList): - newOutVar = self.reassignVariable(outVar, numInVars, outVars, newOutVars) - newElements = set() - for var in elements: - newElements.add(self.reassignVariable(var, numInVars, outVars, newOutVars)) - self.maxList[i] = (newElements, newOutVar) - - # Adjust upper/lower bounds - newLowerBounds = dict() - newUpperBounds = dict() - for var in self.lowerBounds: - newLowerBounds[self.reassignVariable(var, numInVars, outVars, newOutVars)] = self.lowerBounds[var] - for var in self.upperBounds: - newUpperBounds[self.reassignVariable(var, numInVars, outVars, newOutVars)] = self.upperBounds[var] - self.lowerBounds = newLowerBounds - self.upperBounds = 
newUpperBounds - - # Assign output variables to the new array - for outputName in self.outputNames: - numVars = len(self.varMap[outputName].reshape(-1)) - self.varMap[outputName] = newOutVars[:numVars].reshape(self.shapeMap[outputName]) - newOutVars = newOutVars[numVars:] - - self.outputVars = [self.varMap[outputName] for outputName in self.outputNames] - def evaluateWithoutMarabou(self, inputValues): """Try to evaluate the network with the given inputs using ONNX @@ -1538,18 +169,4 @@ def evaluateWithoutMarabou(self, inputValues): else: raise NotImplementedError("Inputs to network expected to be of type 'float', not %s" % onnxType) input_dict[inputName] = inputValues[i].reshape(self.inputVars[i].shape).astype(inputType) - return sess.run(self.outputNames, input_dict) - -def getBroadcastShape(shape1, shape2): - """Helper function to get the shape that results from broadcasting these shapes together - - Args: - shape1 (list of int): First shape - shape2 (list of int): Second shape - - Returns: - (list of int): Broadcast shape - - :meta private: - """ - return [l1 if l1 == l2 else max(l1, l2) for l1, l2 in itertools.zip_longest(shape1[::-1], shape2[::-1], fillvalue=1)][::-1] + return sess.run(self.outputNames, input_dict) \ No newline at end of file diff --git a/maraboupy/docs/Develop/3_Tests.md b/maraboupy/docs/Develop/3_Tests.md index e2d6561202..1b32d5df49 100644 --- a/maraboupy/docs/Develop/3_Tests.md +++ b/maraboupy/docs/Develop/3_Tests.md @@ -16,6 +16,10 @@ directory. When adding new tests or test files, try to follow the syntax of othe This page covers some basic information about how to write these tests. More information can be found in the [pytest](https://docs.pytest.org/en/stable/) documentation. +## Running tests + +Tests can be run by navigating to the `maraboupy` folder and running the command `pytest`. + ## Writing a test When writing a test, make sure that pytest has been imported. Also, relative imports are used to import Marabou so that the tests can be run from any directory regardless of if PYTHONPATH points to the Marabou directory. diff --git a/maraboupy/parsers/InputQueryBuilder.py b/maraboupy/parsers/InputQueryBuilder.py new file mode 100644 index 0000000000..1d4593dd3b --- /dev/null +++ b/maraboupy/parsers/InputQueryBuilder.py @@ -0,0 +1,388 @@ +''' +Top contributors (to current version): + - Christopher Lazarus + - Shantanu Thakoor + - Andrew Wu + - Kyle Julian + - Teruhiro Tagomori + - Min Wu + +This file is part of the Marabou project. +Copyright (c) 2017-2019 by the authors listed in the file AUTHORS +in the top-level source directory) and their institutional affiliations. +All rights reserved. See the file COPYING in the top-level source +directory for licensing information. +''' + +from maraboupy import MarabouCore +from maraboupy import MarabouUtils +from maraboupy.MarabouPythonic import * +from abc import ABC + +class InputQueryBuilder(ABC): + """ + Abstract class for building up an input query. + Should eventually be implemented by a renamed `NetworkParser.cpp` on the C++ side. 
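+
+    A hypothetical usage sketch (via a concrete subclass such as
+    `MarabouNetwork`; the two-variable ReLU network below is made up):
+
+        builder = MarabouNetwork()
+        x = builder.getNewVariable()
+        y = builder.getNewVariable()
+        builder.setLowerBound(x, -1.0)
+        builder.setUpperBound(x, 1.0)
+        builder.addRelu(x, y)           # y = ReLU(x)
+        ipq = builder.getInputQuery()   # build the MarabouCore.InputQuery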
+    """
+    def __init__(self):
+        """
+        Constructs an InputQueryBuilder object and calls function to initialize
+        """
+        self.clear()
+
+    def clear(self):
+        """Reset values to represent empty network
+        """
+        self.numVars = 0
+        self.equList = []
+        self.additionalEquList = [] # used to store user defined equations
+        self.reluList = []
+        self.leakyReluList = []
+        self.sigmoidList = []
+        self.maxList = []
+        self.softmaxList = []
+        self.bilinearList = []
+        self.absList = []
+        self.signList = []
+        self.disjunctionList = []
+        self.lowerBounds = dict()
+        self.upperBounds = dict()
+        self.inputVars = []
+        self.outputVars = []
+
+    def clearProperty(self):
+        """Clear the lower bound and upper bound maps, and self.additionalEquList
+        """
+        self.lowerBounds.clear()
+        self.upperBounds.clear()
+        self.additionalEquList.clear()
+
+    def getNewVariable(self):
+        """Function to create a new variable
+
+        Returns:
+            (int): New variable number
+
+        :meta private:
+        """
+        self.numVars += 1
+        return self.numVars - 1
+
+    def addEquation(self, x, isProperty=False):
+        """Function to add new equation to the network
+
+        Args:
+            x (:class:`~maraboupy.MarabouUtils.Equation`): New equation to add
+            isProperty (bool): If true, this constraint can be removed later by clearProperty() method
+        """
+        if isProperty:
+            self.additionalEquList += [x]
+        else:
+            self.equList += [x]
+
+    def setLowerBound(self, x, v):
+        """Function to set lower bound for variable
+
+        Args:
+            x (int): Variable number to set
+            v (float): Value representing lower bound
+        """
+        self.lowerBounds[x]=v
+
+    def setUpperBound(self, x, v):
+        """Function to set upper bound for variable
+
+        Args:
+            x (int): Variable number to set
+            v (float): Value representing upper bound
+        """
+        self.upperBounds[x]=v
+
+    def addRelu(self, v1, v2):
+        """Function to add a new Relu constraint
+
+        Args:
+            v1 (int): Variable representing input of Relu
+            v2 (int): Variable representing output of Relu
+        """
+        self.reluList += [(v1, v2)]
+
+    def addLeakyRelu(self, v1, v2, slope):
+        """Function to add a new Leaky Relu constraint
+
+        Args:
+            v1 (int): Variable representing input of Leaky Relu
+            v2 (int): Variable representing output of Leaky Relu
+            slope (float): Slope of the Leaky ReLU
+        """
+        self.leakyReluList += [(v1, v2, slope)]
+
+    def addBilinear(self, v1, v2, v3):
+        """Function to add a bilinear constraint to the network
+
+        Args:
+            v1 (int): Variable representing input1 of Bilinear
+            v2 (int): Variable representing input2 of Bilinear
+            v3 (int): Variable representing output of Bilinear
+        """
+        self.bilinearList += [(v1, v2, v3)]
+
+    def addSigmoid(self, v1, v2):
+        """Function to add a new Sigmoid constraint
+
+        Args:
+            v1 (int): Variable representing input of Sigmoid
+            v2 (int): Variable representing output of Sigmoid
+        """
+        self.sigmoidList += [(v1, v2)]
+
+    def addMaxConstraint(self, elements, v):
+        """Function to add a new Max constraint
+
+        Args:
+            elements (set of int): Variables representing inputs to the max constraint
+            v (int): Variable representing output of the max constraint
+        """
+        self.maxList += [(elements, v)]
+
+    def addSoftmaxConstraint(self, inputs, outputs):
+        """Function to add a new softmax constraint
+
+        Args:
+            inputs (set of int): Variables representing inputs to the softmax constraint
+            outputs (set of int): Variables representing outputs of the softmax constraint
+        """
+        self.softmaxList += [(inputs, outputs)]
+
+    def addAbsConstraint(self, b, f):
+        """Function to add a new Abs constraint
+
+        Args:
+            b (int): Variable representing input of the Abs constraint
+            f (int): Variable 
representing output of the Abs constraint + """ + self.absList += [(b, f)] + + def addSignConstraint(self, b, f): + """Function to add a new Sign constraint + + Args: + b (int): Variable representing input of Sign + f (int): Variable representing output of Sign + """ + self.signList += [(b, f)] + + def addDisjunctionConstraint(self, disjuncts): + """Function to add a new Disjunction constraint + + Args: + disjuncts (list of list of Equations): Each inner list represents a disjunct + """ + self.disjunctionList.append(disjuncts) + + def lowerBoundExists(self, x): + """Function to check whether lower bound for a variable is known + + Args: + x (int): Variable to check + """ + return x in self.lowerBounds + + def upperBoundExists(self, x): + """Function to check whether upper bound for a variable is known + + Args: + x (int): Variable to check + """ + return x in self.upperBounds + + def addEquality(self, vars, coeffs, scalar, isProperty=False): + """Function to add equality constraint to network + + .. math:: + \sum_i vars_i * coeffs_i = scalar + + Args: + vars (list of int): Variable numbers + coeffs (list of float): Coefficients + scalar (float): Right hand side constant of equation + isProperty (bool): If true, this constraint can be removed later by clearProperty() method + """ + assert len(vars)==len(coeffs) + e = MarabouUtils.Equation() + for i in range(len(vars)): + e.addAddend(coeffs[i], vars[i]) + e.setScalar(scalar) + self.addEquation(e, isProperty) + + def addInequality(self, vars, coeffs, scalar, isProperty=False): + """Function to add inequality constraint to network + + .. math:: + \sum_i vars_i * coeffs_i \le scalar + + Args: + vars (list of int): Variable numbers + coeffs (list of float): Coefficients + scalar (float): Right hand side constant of inequality + isProperty (bool): If true, this constraint can be removed later by clearProperty() method + """ + assert len(vars)==len(coeffs) + e = MarabouUtils.Equation(MarabouCore.Equation.LE) + for i in range(len(vars)): + e.addAddend(coeffs[i], vars[i]) + e.setScalar(scalar) + self.addEquation(e, isProperty) + + def addConstraint(self, constraint: VarConstraint): + """ + Support the Pythonic API to add constraints to the neurons in the Marabou network. + + :param constraint: an instance of the VarConstraint class, which comprises various neuron constraints. + :return: delegate various constraints into lower/upper bounds and equality/inequality. + """ + vars = list(constraint.combination.varCoeffs) + coeffs = [constraint.combination.varCoeffs[i] for i in vars] + if constraint.lowerBound is not None: + self.setLowerBound(vars[0], constraint.lowerBound) + elif constraint.upperBound is not None: + self.setUpperBound(vars[0], constraint.upperBound) + else: + if constraint.isEquality: + self.addEquality(vars, coeffs, - constraint.combination.scalar) + else: + self.addInequality(vars, coeffs, - constraint.combination.scalar) + + def getInputQuery(self): + """Constructs the `InputQuery` object from the current set of constraints. 
+ + Returns: + :class:`~maraboupy.MarabouCore.InputQuery` + """ + ipq = MarabouCore.InputQuery() + ipq.setNumberOfVariables(self.numVars) + + i = 0 + for inputVarArray in self.inputVars: + for inputVar in inputVarArray.flatten(): + ipq.markInputVariable(inputVar, i) + i+=1 + + i = 0 + for outputVarArray in self.outputVars: + for outputVar in outputVarArray.flatten(): + ipq.markOutputVariable(outputVar, i) + i+=1 + + for e in self.equList: + eq = MarabouCore.Equation(e.EquationType) + for (c, v) in e.addendList: + assert v < self.numVars + eq.addAddend(c, v) + eq.setScalar(e.scalar) + ipq.addEquation(eq) + + for e in self.additionalEquList: + eq = MarabouCore.Equation(e.EquationType) + for (c, v) in e.addendList: + assert v < self.numVars + eq.addAddend(c, v) + eq.setScalar(e.scalar) + ipq.addEquation(eq) + + for r in self.reluList: + assert r[1] < self.numVars and r[0] < self.numVars + MarabouCore.addReluConstraint(ipq, r[0], r[1]) + + for r in self.leakyReluList: + assert r[1] < self.numVars and r[0] < self.numVars + assert(r[2] > 0 and r[2] < 1) + MarabouCore.addLeakyReluConstraint(ipq, r[0], r[1], r[2]) + + for r in self.bilinearList: + assert r[2] < self.numVars and r[1] < self.numVars and r[0] < self.numVars + MarabouCore.addBilinearConstraint(ipq, r[0], r[1], r[2]) + + for r in self.sigmoidList: + assert r[1] < self.numVars and r[0] < self.numVars + MarabouCore.addSigmoidConstraint(ipq, r[0], r[1]) + + for m in self.maxList: + assert m[1] < self.numVars + for e in m[0]: + assert e < self.numVars + MarabouCore.addMaxConstraint(ipq, m[0], m[1]) + + for m in self.softmaxList: + for e in m[1]: + assert e < self.numVars + for e in m[0]: + assert e < self.numVars + MarabouCore.addSoftmaxConstraint(ipq, m[0], m[1]) + + for b, f in self.absList: + MarabouCore.addAbsConstraint(ipq, b, f) + + for b, f in self.signList: + MarabouCore.addSignConstraint(ipq, b, f) + + for disjunction in self.disjunctionList: + converted_disjunction = [] + for disjunct in disjunction: + converted_disjunct = [] + for e in disjunct: + eq = MarabouCore.Equation(e.EquationType) + for (c, v) in e.addendList: + assert v < self.numVars + eq.addAddend(c, v) + eq.setScalar(e.scalar) + converted_disjunct.append(eq) + converted_disjunction.append(converted_disjunct) + MarabouCore.addDisjunctionConstraint(ipq, converted_disjunction) + + for l in self.lowerBounds: + assert l < self.numVars + ipq.setLowerBound(l, self.lowerBounds[l]) + + for u in self.upperBounds: + assert u < self.numVars + ipq.setUpperBound(u, self.upperBounds[u]) + + return ipq + + def saveQuery(self, filename=""): + """Serializes the inputQuery in the given filename + + Args: + filename: (string) file to write serialized inputQuery + """ + ipq = self.getInputQuery() + MarabouCore.saveQuery(ipq, str(filename)) + + def isEqualTo(self, network): + """ + Add a comparison between two Marabou networks and all their attributes. + + :param network: the other Marabou network to be compared with. + :return: True if these two networks and all their attributes are identical; False if not. 
+        """
+        equivalence = True
+        if self.numVars != network.numVars \
+                or self.reluList != network.reluList \
+                or self.sigmoidList != network.sigmoidList \
+                or self.maxList != network.maxList \
+                or self.absList != network.absList \
+                or self.signList != network.signList \
+                or self.disjunctionList != network.disjunctionList \
+                or self.lowerBounds != network.lowerBounds \
+                or self.upperBounds != network.upperBounds:
+            equivalence = False
+        for equation1, equation2 in zip(self.equList, network.equList):
+            if not equation1.isEqualTo(equation2):
+                equivalence = False
+        for inputvars1, inputvars2 in zip(self.inputVars, network.inputVars):
+            if (inputvars1.flatten() != inputvars2.flatten()).any():
+                equivalence = False
+        for outputVars1, outputVars2 in zip(self.outputVars, network.outputVars):
+            if (outputVars1.flatten() != outputVars2.flatten()).any():
+                equivalence = False
+        return equivalence
diff --git a/maraboupy/parsers/ONNXParser.py b/maraboupy/parsers/ONNXParser.py
new file mode 100644
index 0000000000..1bb2613b96
--- /dev/null
+++ b/maraboupy/parsers/ONNXParser.py
@@ -0,0 +1,1439 @@
+'''
+Top contributors (to current version):
+    - Kyle Julian
+    - Haoze Wu
+    - Teruhiro Tagomori
+    - Tobey Shim
+    - Idan Refaeli
+
+This file is part of the Marabou project.
+Copyright (c) 2017-2019 by the authors listed in the file AUTHORS
+in the top-level source directory) and their institutional affiliations.
+All rights reserved. See the file COPYING in the top-level source
+directory for licensing information.
+
+ONNXParser parses neural networks with piecewise linear constraints from the ONNX format
+'''
+from typing import List
+import numpy as np
+from onnx import numpy_helper
+from onnx.helper import get_attribute_value
+from maraboupy import MarabouUtils
+from maraboupy.parsers.InputQueryBuilder import InputQueryBuilder
+from onnx import TensorProto
+import itertools
+from copy import copy
+from onnx.reference.ops._op_list import Split_18, Unsqueeze_1
+
+class ONNXParser:
+    """
+    Class for parsing ONNX files.
+    Should eventually be implemented by a renamed `ONNXParser.cpp` on the C++ side.
+    """
+
+    @staticmethod
+    def parse(query:InputQueryBuilder, graph, inputNames:List[str], outputNames:List[str], reindexOutputVars=True):
+        """
+        Parses the provided ONNX graph into constraints which are stored in the query argument.
+
+        Args:
+            query: the query to which the constraints are added.
+            graph: the graph of the ONNX file to parse.
+            inputNames: list of node names corresponding to inputs
+            outputNames: list of node names corresponding to outputs
+            reindexOutputVars: (bool) if True, reindex the variables so that the output variables
+                immediately follow the input variables
+        """
+        parser = ONNXParser(query, graph, inputNames, outputNames)
+        parser.parseGraph(reindexOutputVars=reindexOutputVars)
+
+
+    def __init__(self, query:InputQueryBuilder, graph, inputNames, outputNames):
+        """
+        Should not be called directly. Use `ONNXParser.parse` instead.
+        """
+        super().__init__()
+        self.query = query
+        self.graph = graph
+        self.inputNames = inputNames
+        self.outputNames = outputNames
+
+        self.madeGraphEquations = []
+        self.varMap = dict()
+        self.constantMap = dict()
+        self.shapeMap = dict()
+
+
+    def parseGraph(self, reindexOutputVars=True):
+        """Parse the ONNX graph and populate the query with the corresponding equations and constraints
+
+        Args:
+            reindexOutputVars: (bool): Reindex the variables so that the output variables are immediately after input variables.
+ + :meta private: + """ + + # Process the shapes and values of the graph while making Marabou equations and constraints + self.foundnInputFlags = 0 + self.processGraph() + + # If the given inputNames/outputNames specify only a portion of the network, then we will have + # shape information saved not relevant to the portion of the network. Remove extra shapes. + self.cleanShapes() + + if reindexOutputVars: + # Other Marabou input parsers assign output variables immediately after input variables and before any + # intermediate variables. This function reassigns variable numbering to match other parsers. + # If this is skipped, the output variables will be the last variables defined. + self.reassignOutputVariables() + else: + for outputName in self.outputNames: + if outputName in self.constantMap: + raise RuntimeError("Output variable %s is a constant, not the output of equations!" % outputName) + self.query.outputVars.extend([self.varMap[outputName] for outputName in self.outputNames]) + + def processGraph(self): + """Processes the ONNX graph to produce Marabou equations + + :meta private: + """ + # Add shapes for the graph's inputs + for node in self.graph.input: + self.shapeMap[node.name] = list([dim.dim_value if dim.dim_value > 0 else 1 for dim in node.type.tensor_type.shape.dim]) + + # If we find one of the specified inputs, create new variables + if node.name in self.inputNames: + self.madeGraphEquations += [node.name] + self.foundnInputFlags += 1 + self.makeNewVariables(node.name) + self.query.inputVars += [np.array(self.varMap[node.name])] + + # Add shapes for constants + for node in self.graph.initializer: + self.shapeMap[node.name] = list(node.dims) + self.madeGraphEquations += [node.name] + + # Recursively create remaining shapes and equations as needed + for outputName in self.outputNames: + self.makeGraphEquations(outputName, True) + + def makeGraphEquations(self, nodeName, makeEquations): + """Recursively populates self.shapeMap, self.varMap, and self.constantMap while adding equations and constraints + + Args: + nodeName (str): Name of node for making the shape + makeEquations (bool): Create Marabou equations for this node if True + + :meta private: + """ + if nodeName in self.madeGraphEquations: + return + + if nodeName in self.inputNames: + self.foundnInputFlags += 1 + # If an inputName is an intermediate layer of the network, we don't need to create Marabou + # equations for its inputs. However, we still need to call makeMarabouEquations in order to + # compute shapes. 
We just need to set the makeEquations flag to false + makeEquations = False + self.madeGraphEquations += [nodeName] + + # Recursively call makeGraphEquations, then call makeMarabouEquations + # This ensures that shapes and values of a node's inputs have been computed first + for inNodeName in self.getInputNodes(nodeName): + self.makeGraphEquations(inNodeName, makeEquations) + + # By this point, all input variables need to have been found + if self.foundnInputFlags != len(self.inputNames): + err_msg = "These input variables could not be found: %s"%(", ".join([inVar for inVar in self.inputNames if inVar not in self.varMap])) + raise RuntimeError(err_msg) + + # Compute node's shape and create Marabou equations as needed + self.makeMarabouEquations(nodeName, makeEquations) + + # Create new variables when we find one of the inputs + if nodeName in self.inputNames: + self.makeNewVariables(nodeName) + self.query.inputVars += [np.array(self.varMap[nodeName])] + + def makeMarabouEquations(self, nodeName, makeEquations): + """Compute the shape and values of a node assuming the input shapes and values have been computed already. + + Args: + nodeName (str): Name of node for which we want to compute the output shape + makeEquations (bool): Create Marabou equations for this node if True + + :meta private: + """ + node = self.getNode(nodeName) + if node.op_type == 'Constant': + self.constant(node) + elif node.op_type == 'Identity': + self.identity(node) + elif node.op_type == 'Cast': + self.cast(node) + elif node.op_type == 'Reshape': + self.reshape(node) + elif node.op_type == 'Flatten': + self.flatten(node) + elif node.op_type == "Transpose": + self.transpose(node) + elif node.op_type == 'Unsqueeze': + self.unsqueeze(node) + elif node.op_type == 'Squeeze': + self.squeeze(node) + elif node.op_type == "BatchNormalization": + self.batchNorm(node, makeEquations) + elif node.op_type == 'Concat': + self.concatEquations(node) + elif node.op_type == "MaxPool": + self.maxpoolEquations(node, makeEquations) + elif node.op_type == "Conv": + self.convEquations(node, makeEquations) + elif node.op_type == 'Gemm': + self.gemmEquations(node, makeEquations) + elif node.op_type == 'MatMul': + self.matMulEquations(node, makeEquations) + elif node.op_type == 'Mul': + self.mulEquations(node, makeEquations) + elif node.op_type == 'Add': + self.addEquations(node, makeEquations) + elif node.op_type == 'Relu': + self.reluEquations(node, makeEquations) + elif node.op_type == 'Sigmoid': + self.sigmoidEquations(node, makeEquations) + elif node.op_type == 'Split': + self.splitEquations(node, nodeName, makeEquations) + elif node.op_type == 'Resize': + self.resizeEquations(node, makeEquations) + elif node.op_type == 'Tanh': + self.tanhEquations(node, makeEquations) + elif node.op_type == 'LeakyRelu': + self.leakyReluEquations(node, makeEquations) + elif node.op_type == 'Softmax': + self.softmaxEquations(node, makeEquations) + elif node.op_type == 'Sub': + self.subEquations(node, makeEquations) + else: + raise NotImplementedError("Operation {} not implemented".format(node.op_type)) + + def getNode(self, nodeName): + """Find the node in the graph corresponding to the given name + + Args: + nodeName (str): Name of node to find in graph + + Returns: + (str): ONNX node named nodeName + + :meta private: + """ + nodes = [node for node in self.graph.node if nodeName in node.output] + assert len(nodes) == 1 + return nodes[0] + + def makeNewVariables(self, nodeName): + """Assuming the node's shape is known, return a set of new variables in the 
same shape + + Args: + nodeName (str): Name of node + + Returns: + (numpy array): Array of variable numbers + + :meta private: + """ + assert nodeName not in self.varMap + shape = self.shapeMap[nodeName] + size = np.prod(shape) + v = np.array([self.query.getNewVariable() for _ in range(size)]).reshape(shape) + self.varMap[nodeName] = v + assert all([np.equal(np.mod(i, 1), 0) for i in v.reshape(-1)]) # check if integers + return v + + def getInputNodes(self, nodeName): + """Get names of nodes that are inputs to the given node + + Args: + nodeName (str): Name of node + + Returns: + (list of str): Names of nodes that are inputs to the given node + + :meta private: + """ + node = self.getNode(nodeName) + inNodes = [] + for inp in node.input: + if len([nde for nde in self.graph.node if inp in nde.output]): + inNodes += [inp] + elif len([nde for nde in self.graph.initializer if nde.name == inp]): + self.constantMap[inp] = [numpy_helper.to_array(init) for init in self.graph.initializer if init.name == inp][0] + return inNodes + + def constant(self, node): + """Function representing a constant tensor + + Args: + node (node): ONNX node representing constant operation + + :meta private: + """ + nodeName = node.output[0] + for attr in node.attribute: + if attr.name == "value": + self.constantMap[nodeName] = numpy_helper.to_array(get_attribute_value(attr)) + return + raise RuntimeError("Could not find value of tensor constant") + + def identity(self, node): + """Function representing identity + + Args: + node (node): ONNX node representing identity operation + + :meta private: + """ + nodeName = node.output[0] + inputName = node.input[0] + self.shapeMap[nodeName] = self.shapeMap[inputName] + if inputName in self.varMap: + self.varMap[nodeName] = self.varMap[inputName] + elif inputName in self.constantMap: + self.constantMap[nodeName] = self.constantMap[inputName] + + def cast(self, node): + """Function representing cast + + Args: + node (node): ONNX node representing cast operation + + :meta private: + """ + nodeName = node.output[0] + inputName = node.input[0] + self.shapeMap[nodeName] = self.shapeMap[inputName] + + # Try to find type to cast to. 
If not found, raise error + to = None + for attr in node.attribute: + if attr.name == "to": + to = get_attribute_value(attr) + if to is None: + raise RuntimeError("Casting type not specified with attribute 'to'") + + # Cast input array to correct type, and throw error if type is unknown + if inputName in self.constantMap: + if to == TensorProto.FLOAT16: + self.constantMap[nodeName] = self.constantMap[inputName].astype('float16') + elif to == TensorProto.FLOAT: + self.constantMap[nodeName] = self.constantMap[inputName].astype('float32') + elif to == TensorProto.DOUBLE: + self.constantMap[nodeName] = self.constantMap[inputName].astype('double') + elif to == TensorProto.UINT8: + self.constantMap[nodeName] = self.constantMap[inputName].astype('uint8') + elif to == TensorProto.UINT16: + self.constantMap[nodeName] = self.constantMap[inputName].astype('uint16') + elif to == TensorProto.UINT32: + self.constantMap[nodeName] = self.constantMap[inputName].astype('uint32') + elif to == TensorProto.UINT64: + self.constantMap[nodeName] = self.constantMap[inputName].astype('uint64') + elif to == TensorProto.INT8: + self.constantMap[nodeName] = self.constantMap[inputName].astype('int8') + elif to == TensorProto.INT16: + self.constantMap[nodeName] = self.constantMap[inputName].astype('int16') + elif to == TensorProto.INT32: + self.constantMap[nodeName] = self.constantMap[inputName].astype('int32') + elif to == TensorProto.INT64: + self.constantMap[nodeName] = self.constantMap[inputName].astype('int64') + else: + err_msg = "Unknown type for casting: %d\n" % to + err_msg += "Check here for ONNX TensorProto: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto" + raise NotImplementedError(err_msg) + + # We shouldn't be casting variables to different types, since Marabou assumes variables have double precision + elif inputName in self.varMap: + raise NotImplementedError("Casting variables not allowed with Marabou") + + def reshape(self, node): + """Function representing reshape + + Args: + node (node): ONNX node representing reshape operation + + :meta private: + """ + nodeName = node.output[0] + inputName1, inputName2 = node.input + + # Assume first input is array to be reshaped, second input is the new shape array + reshapeVals = self.constantMap[inputName2] + self.shapeMap[nodeName] = list(np.zeros(self.shapeMap[inputName1]).reshape(reshapeVals).shape) + if inputName1 in self.varMap: + self.varMap[nodeName] = copy(self.varMap[inputName1]).reshape(reshapeVals) + elif inputName1 in self.constantMap: + self.constantMap[nodeName] = self.constantMap[inputName1].reshape(reshapeVals) + + def flatten(self, node): + """Function representing flatten + + Unlike numpy.flatten(), ONNX's Flatten operation reshapes + a (d_0, d_1, ..., d_n) tensor into a 2D tensor with shape + (d_0 * d_1 * ... * d_(axis-1), d_axis * d_(axis+1) * ... * d_n). 
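+
+        For example (illustrative shapes): flattening a (2, 3, 4) tensor with
+        axis=2 yields a (2*3, 4) = (6, 4) tensor, matching the dimension1 and
+        dimension2 computation below.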
+
+        Args:
+            node (node): ONNX node representing flatten operation
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+
+        # Assume first input is array to be flattened
+        inputName = node.input[0]
+        axis = 1
+        for attr in node.attribute:
+            if attr.name == "axis":
+                axis = get_attribute_value(attr)
+
+        dimension1 = int(np.prod(self.shapeMap[inputName][:axis]))
+        dimension2 = int(np.prod(self.shapeMap[inputName][axis:]))
+        newShape = [dimension1, dimension2]
+        self.shapeMap[nodeName] = newShape
+
+        if inputName in self.varMap:
+            self.varMap[nodeName] = self.varMap[inputName].reshape(newShape)
+        elif inputName in self.constantMap:
+            self.constantMap[nodeName] = self.constantMap[inputName].reshape(newShape)
+
+    def transpose(self, node):
+        """Function representing transpose
+
+        Args:
+            node (node): ONNX node representing transpose operation
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+
+        # Get attributes
+        perm = None
+        for attr in node.attribute:
+            if attr.name == "perm":
+                perm = get_attribute_value(attr)
+        if perm is None:
+            raise RuntimeError("Permutation indices not specified by attribute 'perm'")
+        self.shapeMap[nodeName] = [self.shapeMap[inputName][p] for p in perm]
+        if inputName in self.varMap:
+            self.varMap[nodeName] = \
+                np.transpose(self.varMap[node.input[0]].reshape(self.shapeMap[node.input[0]]),
+                             perm)
+        elif inputName in self.constantMap:
+            self.constantMap[nodeName] = np.transpose(self.constantMap[inputName], perm)
+
+    def unsqueeze(self, node):
+        """Function representing unsqueeze
+
+        Args:
+            node (node): ONNX node representing unsqueeze operation
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+        axes = self.constantMap[node.input[1]]
+
+        if inputName in self.varMap:
+            output_data = Unsqueeze_1.eval(self.varMap[inputName], axes=axes)
+            self.shapeMap[nodeName] = output_data.shape
+            self.varMap[nodeName] = output_data
+        else:
+            output_data = Unsqueeze_1.eval(self.constantMap[inputName], axes=axes)
+            self.shapeMap[nodeName] = output_data.shape
+            self.constantMap[nodeName] = output_data
+
+
+    def squeeze(self, node):
+        """Function representing squeeze
+
+        Args:
+            node (node): ONNX node representing squeeze operation
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName, axisName = node.input
+
+        axis = self.constantMap[axisName]
+
+        axis_ = copy(axis)
+        if inputName in self.varMap:
+            vars = copy(self.varMap[inputName])
+            for i in range(len(axis)):
+                vars = np.squeeze(vars, axis_[i])
+                for j in range(len(axis))[i+1:]:
+                    axis_[j] -= 1
+            self.varMap[nodeName] = vars
+            self.shapeMap[nodeName] = vars.shape
+        return
+
+    def batchNorm(self, node, makeEquations):
+        """Function to generate equations for a BatchNormalization
+
+        Args:
+            node (node): ONNX node representing the BatchNormalization operation
+
+        :meta private:
+        """
+
+        nodeName = node.output[0]
+        inputName = node.input[0]
+        self.shapeMap[nodeName] = self.shapeMap[inputName]
+
+        # Get attributes
+        epsilon = None
+        for attr in node.attribute:
+            if attr.name == "epsilon":
+                epsilon = get_attribute_value(attr)
+
+        # Get inputs
+        scales = self.constantMap[node.input[1]].reshape(-1)
+        biases = self.constantMap[node.input[2]].reshape(-1)
+        input_means = self.constantMap[node.input[3]].reshape(-1)
+        input_variances = self.constantMap[node.input[4]].reshape(-1)
+
+        if not makeEquations:
+            return
+
+        numChannels = len(scales)
+
+        # Get variables
+        inputVars = self.varMap[inputName].reshape(numChannels, -1)
+        outputVars = 
self.makeNewVariables(nodeName).reshape(numChannels, -1) + assert(inputVars.shape == outputVars.shape) + + numInputs = inputVars.shape[1] + + for i in range(numChannels): + for j in range(numInputs): + # Add equation + # To know this computation, + # refer to https://github.com/onnx/onnx/blob/master/docs/Operators.md#batchnormalization. + e = MarabouUtils.Equation() + e.addAddend(1 / np.sqrt(input_variances[i] + epsilon) * scales[i], inputVars[i][j]) + e.addAddend(-1, outputVars[i][j]) + e.setScalar(input_means[i] / np.sqrt(input_variances[i] + epsilon) * scales[i] - biases[i]) + self.query.addEquation(e) + + def maxpoolEquations(self, node, makeEquations): + """Function to generate maxpooling equations + + Args: + node (node): ONNX node representing maxpool operation + makeEquations (bool): True if we need to create new variables and maxpool constraints + + :meta private: + """ + nodeName = node.output[0] + + # Extract attributes and define shape + inputShape = self.shapeMap[node.input[0]] + kernel_shape = [1, 1] + strides = [1, 1] + for attr in node.attribute: + if attr.name == 'kernel_shape': + kernel_shape = get_attribute_value(attr) + elif attr.name == 'strides': + strides = get_attribute_value(attr) + + outputShape = [dim for dim in inputShape] + outputShape[2] = int(np.ceil((inputShape[2] - ((kernel_shape[0] - 1) + 1) + 1) / strides[0])) + outputShape[3] = int(np.ceil((inputShape[3] - ((kernel_shape[1] - 1) + 1) + 1) / strides[1])) + self.shapeMap[nodeName] = outputShape + + if not makeEquations: + return + + inVars = self.varMap[node.input[0]] + outVars = self.makeNewVariables(nodeName) + for i in range(outputShape[2]): + for j in range(outputShape[3]): + for k in range(outputShape[1]): + maxVars = set() + for di in range(strides[0]*i, strides[0]*i + kernel_shape[0]): + for dj in range(strides[1]*j, strides[1]*j + kernel_shape[1]): + if di < inputShape[2] and dj < inputShape[3]: + maxVars.add(inVars[0][k][di][dj]) + self.query.addMaxConstraint(maxVars, outVars[0][k][i][j]) + + def softmaxEquations(self, node, makeEquations): + """Function to generate constraints for softmax + + Args: + node (node): ONNX node representing the softmax operation + makeEquations (bool): True if we need to create new variables and maxpool constraints + + :meta private: + """ + nodeName = node.output[0] + + # Extract attributes and define shape + inputShape = self.shapeMap[node.input[0]] + for attr in node.attribute: + if attr.name == 'axis': + axis = get_attribute_value(attr) + + self.shapeMap[nodeName] = inputShape + + if not makeEquations: + return + + inVars = self.varMap[node.input[0]] + outVars = self.makeNewVariables(nodeName) + + if len(inputShape) == 2 and inputShape[0] == 1: + self.query.addSoftmaxConstraint(list(np.array(inVars).flatten()), list(np.array(outVars).flatten())) + else: + axis = ( len(inputShape) + axis ) % len(inputShape) + perm = [] + for i, s in enumerate(inputShape): + if i == axis: + continue + perm.append(i) + perm.append(axis) + + inVarsReshaped = np.transpose(inVars, perm).reshape(-1, inputShape[axis]) + outVarsReshaped = np.transpose(outVars, perm).reshape(-1, inputShape[axis]) + for i in range(inVarsReshaped.shape[0]): + self.query.addSoftmaxConstraint(inVarsReshaped[i], outVarsReshaped[i]) + + def convEquations(self, node, makeEquations): + """Function to generate equations for a 2D convolution + + Args: + node (node): ONNX node representing the 2D Convolution operation + makeEquations (bool): True if we need to create new variables and write Marabou equations + + :meta 
private: + """ + nodeName = node.output[0] + + # Extract information about convolution + strides = [1, 1] + pads = [0, 0, 0, 0] + for attr in node.attribute: + if attr.name == 'strides': + strides = get_attribute_value(attr) + elif attr.name == 'pads': + pads = get_attribute_value(attr) + pad_left, pad_bottom, pad_right, pad_top = pads + + # Get input shape information + # First input should be variable tensor, the second a weight matrix defining filters + shape0 = self.shapeMap[node.input[0]] + shape1 = self.shapeMap[node.input[1]] + input_channels = shape0[1] + input_width = shape0[2] + input_height = shape0[3] + num_filters = shape1[0] + filter_channels = shape1[1] + filter_width = shape1[2] + filter_height = shape1[3] + + # The third input is optional and specifies a bias for each filter + # Bias is 0 if third input is not given + biases = np.zeros(num_filters) + if len(node.input) == 3: + biases = self.constantMap[node.input[2]] + + # The number of channels should match between input variable and filters + assert input_channels == filter_channels + + # Compute output shape + out_width = (input_width - filter_width + pad_left + pad_right) // strides[0] + 1 + out_height = (input_height - filter_height + pad_bottom + pad_top) // strides[1] + 1 + out_channels = num_filters + self.shapeMap[nodeName] = [shape0[0], out_channels, out_width, out_height] + + if not makeEquations: + return + + inVars = self.varMap[node.input[0]] + weights = self.constantMap[node.input[1]] + outVars = self.makeNewVariables(nodeName) + + ### Generate actual equations ### + # There is one equation for every output variable + for i in range(out_width): + for j in range(out_height): + for k in range(out_channels): # Out_channel corresponds to filter number + e = MarabouUtils.Equation() + + # The equation convolves the filter with the specified input region + # Iterate over the filter + for di in range(filter_width): + for dj in range(filter_height): + for dk in range(filter_channels): + w_ind = int(strides[0]*i+di - pad_left) + h_ind = int(strides[1]*j+dj - pad_bottom) + if h_ind < input_height and h_ind >= 0 and w_ind < input_width and w_ind >= 0: + var = inVars[0][dk][w_ind][h_ind] + c = weights[k][dk][di][dj] + e.addAddend(c, var) + + # Add output variable + e.addAddend(-1, outVars[0][k][i][j]) + e.setScalar(-biases[k]) + self.query.addEquation(e) + + def gemmEquations(self, node, makeEquations): + """Function to generate equations corresponding to Gemm (general matrix multiplication) + + Args: + node (node): ONNX node representing the Gemm operation + makeEquations (bool): True if we need to create new variables and write Marabou equations + + :meta private: + """ + nodeName = node.output[0] + + # Get inputs + if len(node.input) == 3: + inputName1, inputName2, inputName3 = node.input + else: + inputName1, inputName2 = node.input + inputName3 = None + shape1 = self.shapeMap[inputName1] + shape2 = self.shapeMap[inputName2] + + # Transpose first two inputs if needed, + # and save scaling parameters alpha and beta if set + alpha = 1.0 + beta = 1.0 + transA = 0 + transB = 0 + for attr in node.attribute: + if attr.name == 'transA': + transA = get_attribute_value(attr) + elif attr.name == 'transB': + transB = get_attribute_value(attr) + elif attr.name == 'alpha': + alpha = get_attribute_value(attr) + elif attr.name == 'beta': + beta = get_attribute_value(attr) + + if transA: + shape1 = shape1[::-1] + if transB: + shape2 = shape2[::-1] + outShape = [shape1[0], shape2[1]] + self.shapeMap[nodeName] = outShape + if not 
makeEquations:
+            return
+
+        # Assume that first input is variables, second is Matrix for MatMul, and third is bias addition
+        input1 = self.varMap[inputName1]
+        input2 = self.constantMap[inputName2]
+        if inputName3:
+            input3 = self.constantMap[inputName3]
+
+        # Transpose inputs
+        if transA:
+            input1 = np.transpose(input1)
+        if transB:
+            input2 = np.transpose(input2)
+        if inputName3:
+            input3 = np.broadcast_to(input3, outShape)
+
+        assert shape1[-1] == shape2[0]
+        assert shape1[0] == outShape[0]
+        assert shape2[1] == outShape[1]
+
+        # Create new variables
+        outputVariables = self.makeNewVariables(nodeName)
+        # Generate equations
+        for i in range(shape1[0]):
+            for j in range(shape2[1]):
+                e = MarabouUtils.Equation()
+                for k in range(shape1[1]):
+                    e.addAddend(input2[k][j]*alpha, input1[i][k])
+
+                # Put output variable as the last addend
+                e.addAddend(-1, outputVariables[i][j])
+                if inputName3:
+                    e.setScalar(-input3[i][j]*beta)
+                else:
+                    e.setScalar(0)
+                self.query.addEquation(e)
+
+
+    def matMulEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to matrix multiplication
+
+        Args:
+            node (node): ONNX node representing the MatMul operation
+            makeEquations (bool): True if we need to create new variables and write Marabou equations
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+
+        # Get inputs and determine which inputs are constants and which are variables
+        inputName1, inputName2 = node.input
+        shape1 = self.shapeMap[inputName1]
+        shape2 = self.shapeMap[inputName2]
+        if len(shape1) > 2 and shape1[0] == 1:
+            shape1 = shape1[1:]
+        elif len(shape1) == 1:
+            # Broadcast first input to make sure the first input is a matrix
+            shape1 = [1] + shape1
+
+        if len(shape2) > 2 and shape2[0] == 1:
+            shape2 = shape2[1:]
+        a = np.zeros(shape1)
+        b = np.zeros(shape2)
+        c = np.matmul(a, b)
+        outshape = c.shape
+        self.shapeMap[nodeName] = outshape
+        if not makeEquations:
+            return
+
+        firstInputConstant = False; secondInputConstant = False
+        if inputName1 in self.constantMap:
+            input1 = self.constantMap[inputName1]
+            firstInputConstant = True
+        else:
+            input1 = self.varMap[inputName1]
+
+        if inputName2 in self.constantMap:
+            input2 = self.constantMap[inputName2]
+            secondInputConstant = True
+        else:
+            input2 = self.varMap[inputName2]
+
+        input1 = input1.reshape(shape1)
+        input2 = input2.reshape(shape2)
+
+        # If both inputs are constant, then the output is constant as well, and we don't need new variables or equations
+        if firstInputConstant and secondInputConstant:
+            self.constantMap[nodeName] = np.matmul(input1,input2)
+            return
+
+        # Create new variables
+        outputVariables = self.makeNewVariables(nodeName)
+
+        if not firstInputConstant and not secondInputConstant:
+            # bi-linear constraints
+            self.addBilinearConstraints(shape1, shape2, input1, input2, outputVariables)
+        else:
+            # Generate equations
+            for i in range(shape1[0]):
+                # Differentiate between matrix-vector multiplication and matrix-matrix multiplication
+                if len(shape2)>1:
+                    for j in range(shape2[1]):
+                        e = MarabouUtils.Equation()
+                        for k in range(shape1[1]):
+                            if firstInputConstant:
+                                e.addAddend(input1[i][k], input2[k][j])
+                            else:
+                                e.addAddend(input2[k][j], input1[i][k])
+
+                        # Put output variable as the last addend
+                        e.addAddend(-1, outputVariables[i][j])
+                        e.setScalar(0.0)
+                        self.query.addEquation(e)
+                else:
+                    e = MarabouUtils.Equation()
+                    for k in range(shape1[1]):
+                        if firstInputConstant:
+                            e.addAddend(input1[i][k], input2[k])
+                        else:
+                            e.addAddend(input2[k], input1[i][k])
+
+                    # Put output variable 
as the last addend
+                    e.addAddend(-1, outputVariables[i])
+                    e.setScalar(0.0)
+                    self.query.addEquation(e)
+
+    def addBilinearConstraints(self, shape1, shape2, input1, input2, outputVariables):
+        # Generate equations
+        if len(shape2) == 2:
+            for i in range(shape1[0]):
+                for j in range(shape2[1]):
+                    e = MarabouUtils.Equation()
+                    for k in range(shape1[1]):
+                        v = self.query.getNewVariable()
+                        self.query.addBilinear(input1[i][k], input2[k][j], v)
+                        e.addAddend(1, v)
+
+                    # Put output variable as the last addend
+                    e.addAddend(-1, outputVariables[i][j])
+                    e.setScalar(0.0)
+                    self.query.addEquation(e)
+        elif len(shape2) == 3:
+            assert(shape1[0] == shape2[0])
+            for l in range(shape1[0]):
+                for i in range(shape1[1]):
+                    for j in range(shape2[2]):
+                        e = MarabouUtils.Equation()
+                        for k in range(shape1[2]):
+                            v = self.query.getNewVariable()
+                            self.query.addBilinear(input1[l][i][k], input2[l][k][j], v)
+                            e.addAddend(1, v)
+
+                        # Put output variable as the last addend
+                        e.addAddend(-1, outputVariables[l][i][j])
+                        e.setScalar(0.0)
+                        self.query.addEquation(e)
+        else:
+            raise RuntimeError(f"Unsupported shape in matMul tensor: {shape2}")
+
+    def concatEquations(self, node):
+        """Function to generate equations corresponding to concat
+
+        Args:
+            node (node): ONNX node representing the Concat operation
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+
+        # Get attributes
+        axis = None
+        for attr in node.attribute:
+            if attr.name == "axis":
+                axis = get_attribute_value(attr)
+
+        # Set maps of shape and var
+        inputVars = list([self.varMap[input] for input in node.input])
+        outputVars = np.concatenate(inputVars, axis)
+        self.shapeMap[nodeName] = outputVars.shape
+        self.varMap[nodeName] = outputVars
+
+    def splitEquations(self, node, nodeName, makeEquations):
+        """Function to generate equations corresponding to split
+
+        Args:
+            node (node): ONNX node representing the Split operation
+            nodeName (str): Name of target node
+            makeEquations (bool): True if we need to create new variables and write Marabou equations
+
+        :meta private:
+        """
+        # Get attributes
+        axis = None
+        split = None
+        for attr in node.attribute:
+            if attr.name == "axis":
+                axis = get_attribute_value(attr)
+            if attr.name == "split":
+                split = get_attribute_value(attr)
+
+        inputName = node.input[0]
+        inputVars = Split_18.eval(self.varMap[inputName], split=split, axis=axis)
+
+        assert len(inputVars) == len(node.output)
+
+        # Set a shape of target output
+        for i in range(len(node.output)):
+            if node.output[i] == nodeName:
+                self.shapeMap[node.output[i]] = inputVars[i].shape
+                break
+
+        if not makeEquations:
+            return
+
+        # Get variables and add equations
+        for i in range(len(node.output)):
+            if node.output[i] == nodeName:
+                reshapedInputVars = inputVars[i].reshape(-1)
+                outputVars = self.makeNewVariables(node.output[i]).reshape(-1)
+                for j in range(len(reshapedInputVars)):
+                    # Add equation
+                    e = MarabouUtils.Equation()
+                    e.addAddend(1, reshapedInputVars[j])
+                    e.addAddend(-1, outputVars[j])
+                    e.setScalar(0)
+                    self.query.addEquation(e)
+                break
+
+    def resizeEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to resize
+
+        Args:
+            node (node): ONNX node representing the Resize operation
+            makeEquations (bool): True if we need to create new variables and write Marabou equations
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+
+        # Check number of dimensions of input
+        inputVars = self.varMap[inputName]
+        inputShape = inputVars.shape
+        if inputVars.ndim != 4:
+            raise 
NotImplementedError("Marabou only supports the Resize operator for the specific upsampling case used in YOLO.")
+
+        # Get and check attributes
+        coordinate_transformation_mode = None
+        cubic_coeff_a = None
+        mode = None
+        nearest_mode = None
+
+        for attr in node.attribute:
+            value = get_attribute_value(attr)
+            if attr.name == "coordinate_transformation_mode" and value.decode() == "asymmetric":
+                coordinate_transformation_mode = value
+            elif attr.name == "cubic_coeff_a" and value == -0.75:
+                cubic_coeff_a = value
+            elif attr.name == "mode" and value.decode() == "nearest":
+                mode = value
+            elif attr.name == "nearest_mode" and value.decode() == "floor":
+                nearest_mode = value
+            else:
+                # Marabou supports Resize only in the very specific case below.
+                #   coordinate_transformation_mode: asymmetric
+                #   cubic_coeff_a: -0.75
+                #   mode: nearest
+                #   nearest_mode: floor
+                # There are many cases other than the above according to https://github.com/onnx/onnx/blob/main/docs/Operators.md#resize
+                # Please note that we should carefully expand this operation beyond this case.
+                raise NotImplementedError("Marabou only supports the Resize operator for the specific upsampling case used in YOLO.")
+
+        # Get scales
+        scales = None
+        if len(node.input) == 3 and np.all(self.constantMap[node.input[2]] == [1., 1., 2., 2.]):
+            scales = [1, 1, 2, 2]
+        else:
+            raise NotImplementedError("Marabou only supports the Resize operator for the specific upsampling case used in YOLO.")
+
+        # Set output shape
+        outputShape = (inputShape[0], inputShape[1], inputShape[2] * scales[2], inputShape[3] * scales[3])
+        self.shapeMap[nodeName] = outputShape
+
+        if not makeEquations:
+            return
+
+        # Get variables
+        outputVars = self.makeNewVariables(nodeName)
+
+        assert scales[2] * scales[3] * inputVars.size == outputVars.size
+
+        for i in range(outputShape[1]):
+            for j in range(outputShape[2]):
+                for k in range(outputShape[3]):
+                    # Add equation
+                    e = MarabouUtils.Equation()
+                    e.addAddend(1, inputVars[0][i][int(j / 2)][int(k / 2)])
+                    e.addAddend(-1, outputVars[0][i][j][k])
+                    e.setScalar(0)
+                    self.query.addEquation(e)
+
+    def mulEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to elementwise multiplication by a constant
+
+        Args:
+            node (node): ONNX node representing the Mul operation
+            makeEquations (bool): True if we need to create new variables and write Marabou equations
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+
+        # Get the inputs
+        inputName1, inputName2 = node.input
+        shape1 = self.shapeMap[inputName1]
+        # shape2 = self.shapeMap[inputName2] # commented out since this is never used.
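+        # Illustration of the encoding below (not additional parser logic):
+        # for a constant multiplier c, each output entry y_i is tied to its
+        # input entry x_i by one linear equation
+        #     c * x_i - y_i = 0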
+ + + # Get the broadcasted shape + outShape = shape1 + self.shapeMap[nodeName] = outShape + if not makeEquations: + return + + multiple = self.constantMap[inputName2] + input1 = self.varMap[inputName1] + outputVariables = self.makeNewVariables(nodeName) + input1 = input1.reshape(-1) + outputVariables = outputVariables.reshape(-1) + + for i in range(len(input1)): + e = MarabouUtils.Equation() + e.addAddend(multiple, input1[i]) + e.addAddend(-1, outputVariables[i]) + e.setScalar(0.0) + self.query.addEquation(e) + return + + def addEquations(self, node, makeEquations): + """Function to generate equations corresponding to addition + + Args: + node (node): ONNX node representing the Add operation + makeEquations (bool): True if we need to create new variables and write Marabou equations + + :meta private: + """ + nodeName = node.output[0] + + # Get the inputs + inputName1, inputName2 = node.input + shape1 = self.shapeMap[inputName1] + shape2 = self.shapeMap[inputName2] + + # Get the broadcasted shape + outShape = getBroadcastShape(shape1, shape2) + self.shapeMap[nodeName] = outShape + if not makeEquations: + return + + # Decide which inputs are variables and which are constants + firstInputConstant = False; secondInputConstant = False + if inputName1 in self.constantMap: + firstInputConstant = True + input1 = self.constantMap[inputName1] + else: + input1 = self.varMap[inputName1] + + if inputName2 in self.constantMap: + secondInputConstant = True + input2 = self.constantMap[inputName2] + else: + input2 = self.varMap[inputName2] + + # Broadcast inputs to ensure the shapes match + input1 = np.broadcast_to(input1, outShape) + input2 = np.broadcast_to(input2, outShape) + + # The shape after broadcasting must match + assert input1.shape == input2.shape + + # If both inputs to add are constant, then the output is constant too + # No new variables are needed, we just need to store the output in constantMap + if firstInputConstant and secondInputConstant: + self.constantMap[nodeName] = input1 + input2 + return + + # If both inputs are variables, then we need a new variable to represent + # the sum of the two variables + elif not firstInputConstant and not secondInputConstant: + outputVariables = self.makeNewVariables(nodeName) + input1 = input1.reshape(-1) + input2 = input2.reshape(-1) + outputVariables = outputVariables.reshape(-1) + for i in range(len(input1)): + e = MarabouUtils.Equation() + e.addAddend(1, input1[i]) + e.addAddend(1, input2[i]) + e.addAddend(-1, outputVariables[i]) + e.setScalar(0.0) + self.query.addEquation(e) + return + + # Otherwise, we are adding constants to variables. + # We don't need new equations or new variables if the input variable is the output of a linear equation. + # Instead, we can just edit the scalar term of the existing linear equation. + # However, if the input variables are not outputs of linear equations (input variables or outputs of + # activation functions) then we will need new equations. 
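+        # Worked illustration of the strategy above (made-up numbers): if an
+        # earlier node produced the equation  2*x0 - x1 = 0  (the output x1 is
+        # the last addend, with coefficient -1), then computing x1 + 5 needs no
+        # new variable; rewriting the scalar gives  2*x0 - x1 = -5, i.e.
+        # x1 = 2*x0 + 5, which is exactly what equ.setScalar(equ.scalar - const)
+        # does in the loop below.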
+        if firstInputConstant:
+            constInput = input1
+            varInput = input2
+        else:
+            constInput = input2
+            varInput = input1
+        constInput = constInput.reshape(-1)
+        varInput = varInput.reshape(-1)
+
+        # Adjust equations to incorporate the constant addition
+        numEquationsChanged = 0
+        for equ in self.query.equList:
+            (c,var) = equ.addendList[-1]
+            assert c == -1
+            if var in varInput:
+                ind = np.where(var == varInput)[0][0]
+
+                # Adjust the equation
+                equ.setScalar(equ.scalar-constInput[ind])
+                numEquationsChanged += 1
+
+        # If we changed one equation for every input variable, then
+        # we don't need any new equations
+        if numEquationsChanged == len(varInput):
+            self.varMap[nodeName] = copy(varInput).reshape(outShape)
+        else:
+            # Otherwise, assert no equations were changed, and we need to create new equations
+            assert numEquationsChanged == 0
+            outputVariables = self.makeNewVariables(nodeName).reshape(-1)
+            for i in range(len(outputVariables)):
+                e = MarabouUtils.Equation()
+                e.addAddend(1, varInput[i])
+                e.addAddend(-1, outputVariables[i])
+                e.setScalar(-constInput[i])
+                self.query.addEquation(e)
+
+    def reluEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to pointwise Relu
+
+        Args:
+            node (node): ONNX node representing the Relu operation
+            makeEquations (bool): True if we need to create new variables and add new Relus
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+        self.shapeMap[nodeName] = self.shapeMap[inputName]
+        if not makeEquations:
+            return
+
+        # Get variables
+        inputVars = self.varMap[inputName].reshape(-1)
+        outputVars = self.makeNewVariables(nodeName).reshape(-1)
+        assert len(inputVars) == len(outputVars)
+
+        # Generate equations
+        for i in range(len(inputVars)):
+            self.query.addRelu(inputVars[i], outputVars[i])
+        for f in outputVars:
+            self.query.setLowerBound(f, 0.0)
+
+    def leakyReluEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to pointwise LeakyRelu
+
+        Args:
+            node (node): ONNX node representing the LeakyRelu operation
+            makeEquations (bool): True if we need to create new variables and add new LeakyRelus
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+        self.shapeMap[nodeName] = self.shapeMap[inputName]
+        if not makeEquations:
+            return
+
+        alpha = 0.01
+        for attr in node.attribute:
+            if attr.name == 'alpha':
+                alpha = get_attribute_value(attr)
+
+        # Get variables
+        inputVars = self.varMap[inputName].reshape(-1)
+        outputVars = self.makeNewVariables(nodeName).reshape(-1)
+        assert len(inputVars) == len(outputVars)
+
+        # Generate equations
+        for i in range(len(inputVars)):
+            self.query.addLeakyRelu(inputVars[i], outputVars[i], alpha)
+
+    def subEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to subtraction
+
+        Args:
+            node (node): ONNX node representing the Sub operation
+            makeEquations (bool): True if we need to create new variables and write Marabou equations
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName1, inputName2 = node.input[0], node.input[1]
+        assert inputName1 in self.shapeMap and inputName2 in self.shapeMap
+        assert self.shapeMap[inputName1] == self.shapeMap[inputName2]
+        self.shapeMap[nodeName] = self.shapeMap[inputName1]
+
+        if not makeEquations:
+            return
+
+        assert inputName1 in self.varMap and inputName2 in self.constantMap
+
+        # Get variables
+        inputVars = self.varMap[inputName1].reshape(-1)
+        outputVars = self.makeNewVariables(nodeName).reshape(-1)
+        constants = 
+        assert len(inputVars) == len(outputVars) == len(constants)
+
+        # Generate equations
+        for i in range(len(inputVars)):
+            e = MarabouUtils.Equation()
+            e.addAddend(1, inputVars[i])
+            e.addAddend(-1, outputVars[i])
+            e.setScalar(-constants[i])
+            self.query.addEquation(e)
+
+    def sigmoidEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to Sigmoid
+
+        Args:
+            node (node): ONNX node representing the Sigmoid operation
+            makeEquations (bool): True if we need to create new variables and add new Sigmoids
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+        self.shapeMap[nodeName] = self.shapeMap[inputName]
+        if not makeEquations:
+            return
+
+        # Get variables
+        inputVars = self.varMap[inputName].reshape(-1)
+        outputVars = self.makeNewVariables(nodeName).reshape(-1)
+        assert len(inputVars) == len(outputVars)
+
+        # Generate equations
+        for i in range(len(inputVars)):
+            self.query.addSigmoid(inputVars[i], outputVars[i])
+        for f in outputVars:
+            self.query.setLowerBound(f, 0.0)
+            self.query.setUpperBound(f, 1.0)
+
+    def tanhEquations(self, node, makeEquations):
+        """Function to generate equations corresponding to Tanh
+
+        Args:
+            node (node): ONNX node representing the Tanh operation
+            makeEquations (bool): True if we need to create new variables and add new Tanhs
+
+        :meta private:
+        """
+        nodeName = node.output[0]
+        inputName = node.input[0]
+        self.shapeMap[nodeName] = self.shapeMap[inputName]
+        if not makeEquations:
+            return
+
+        # Get variables
+        inputVars = self.varMap[inputName].reshape(-1)
+        outputVars = self.makeNewVariables(nodeName).reshape(-1)
+        assert len(inputVars) == len(outputVars)
+        firstAffine = np.array([self.query.getNewVariable() for i in range(outputVars.size)])
+        sigmoidOutput = np.array([self.query.getNewVariable() for i in range(outputVars.size)])
+
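+        # The identity used below can be checked directly (sketch):
+        #   2 * sigmoid(2x) - 1 = (1 - exp(-2x)) / (1 + exp(-2x))
+        #                       = (exp(x) - exp(-x)) / (exp(x) + exp(-x)) = tanh(x)
+        # e.g. at x = 0: 2 * sigmoid(0) - 1 = 2 * 0.5 - 1 = 0 = tanh(0).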
+        # Generate equations
+        for i in range(len(inputVars)):  # tanh(x) = 2 * sigmoid(2x) - 1
+            self.query.addEquality([inputVars[i], firstAffine[i]], [2.0, -1.0], 0.0, isProperty=False)
+            self.query.addSigmoid(firstAffine[i], sigmoidOutput[i])
+            self.query.addEquality([sigmoidOutput[i], outputVars[i]], [2.0, -1.0], 1.0, isProperty=False)
+        for f in outputVars:
+            self.query.setLowerBound(f, -1.0)
+            self.query.setUpperBound(f, 1.0)
+
+    def cleanShapes(self):
+        """Remove unused shapes
+
+        After constructing equations, remove shapes from self.shapeMap that are part of the graph but not
+        relevant for this input query. This is only cosmetic and does not impact Marabou.
+
+        :meta private:
+        """
+        for nodeName in list(self.shapeMap):
+            if nodeName not in self.varMap and nodeName not in self.constantMap:
+                self.shapeMap.pop(nodeName)
+
+    def reassignVariable(self, var, numInVars, outVars, newOutVars):
+        """Reassign output variable so that output variables follow input variables
+
+        This function computes what the given variable should be when the output variables are
+        moved to come immediately after the input variables.
+
+        Args:
+            var (int): Original variable number
+            numInVars (int): Number of input variables
+            outVars (numpy array of int): Original output variables
+            newOutVars (numpy array of int): New output variables
+
+        Returns:
+            (int): New variable assignment
+
+        :meta private:
+        """
+        if var < numInVars:
+            return var
+        if var in outVars:
+            ind = np.where(var == outVars)[0][0]
+            return newOutVars[ind]
+        return var + len([outVar for outVar in outVars if outVar > var])
+
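+    # Illustrative example of the renumbering (variable indices assumed for exposition):
+    # with inputs {0, 1}, intermediate variables {2, 3} and output variables {4, 5},
+    # the outputs move to {2, 3} so that they immediately follow the inputs, and the
+    # intermediates shift up to {4, 5}; reassignVariable maps each old index to its new one.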
+    def reassignOutputVariables(self):
+        """Reassign output variables so output variable numbers follow input variable numbers
+
+        Other input parsers assign output variables after input variables and before any intermediate variables.
+        This function reassigns the numbers for the output variables and shifts all other variables up to make space.
+
+        :meta private:
+        """
+        for outputName in self.outputNames:
+            if outputName in self.constantMap:
+                raise RuntimeError("Output variable %s is a constant, not the output of equations!" % outputName)
+        outVars = np.concatenate([self.varMap[outputName].reshape(-1) for outputName in self.outputNames])
+        numInVars = np.sum([np.prod(self.shapeMap[inputName]) for inputName in self.inputNames])
+        numOutVars = len(outVars)
+        newOutVars = np.array(range(numInVars, numInVars + numOutVars))
+
+        # Adjust equation variables
+        for eq in self.query.equList:
+            for i, (c, var) in enumerate(eq.addendList):
+                eq.addendList[i] = (c, self.reassignVariable(var, numInVars, outVars, newOutVars))
+
+        # Adjust additional (property) equation variables
+        for eq in self.query.additionalEquList:
+            for i, (c, var) in enumerate(eq.addendList):
+                eq.addendList[i] = (c, self.reassignVariable(var, numInVars, outVars, newOutVars))
+
+        # Adjust relu list
+        for i, variables in enumerate(self.query.reluList):
+            self.query.reluList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables])
+
+        # Adjust leaky relu list
+        for i, variables in enumerate(self.query.leakyReluList):
+            self.query.leakyReluList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables])
+
+        # Adjust sigmoid list
+        for i, variables in enumerate(self.query.sigmoidList):
+            self.query.sigmoidList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables])
+
+        # Adjust bilinear list
+        for i, variables in enumerate(self.query.bilinearList):
+            self.query.bilinearList[i] = tuple([self.reassignVariable(var, numInVars, outVars, newOutVars) for var in variables])
+
+        # Adjust softmax list
+        for i, (inputs, outputs) in enumerate(self.query.softmaxList):
+            newInputs = []
+            for var in inputs:
+                newInputs.append(self.reassignVariable(var, numInVars, outVars, newOutVars))
+            newOutputs = []
+            for var in outputs:
+                newOutputs.append(self.reassignVariable(var, numInVars, outVars, newOutVars))
+
+            self.query.softmaxList[i] = (newInputs, newOutputs)
+
+        # Adjust max pool list
+        for i, (elements, outVar) in enumerate(self.query.maxList):
+            newOutVar = self.reassignVariable(outVar, numInVars, outVars, newOutVars)
+            newElements = set()
+            for var in elements:
+                newElements.add(self.reassignVariable(var, numInVars, outVars, newOutVars))
+            self.query.maxList[i] = (newElements, newOutVar)
+
+        # Adjust upper/lower bounds
+        newLowerBounds = dict()
+        newUpperBounds = dict()
+        for var in self.query.lowerBounds:
+            newLowerBounds[self.reassignVariable(var, numInVars, outVars, newOutVars)] = self.query.lowerBounds[var]
+        for var in self.query.upperBounds:
+            newUpperBounds[self.reassignVariable(var, numInVars, outVars, newOutVars)] = self.query.upperBounds[var]
+        self.query.lowerBounds = newLowerBounds
+        self.query.upperBounds = newUpperBounds
+
+        # Assign output variables to the new array
+        for outputName in self.outputNames:
+            numVars = len(self.varMap[outputName].reshape(-1))
+            self.varMap[outputName] = newOutVars[:numVars].reshape(self.shapeMap[outputName])
+            newOutVars = newOutVars[numVars:]
+
+        self.query.outputVars = [self.varMap[outputName] for outputName in self.outputNames]
+
+def getBroadcastShape(shape1, shape2):
+    """Helper function to get the shape that results from broadcasting these shapes together
+
+    Assumes the two shapes are broadcastable, i.e. wherever the dimensions disagree, one of them is 1.
+
+    Args:
+        shape1 (list of int): First shape
+        shape2 (list of int): Second shape
+
+    Returns:
+        (list of int): Broadcast shape
+
+    :meta private:
+    """
+    return [max(l1, l2) for l1, l2 in itertools.zip_longest(shape1[::-1], shape2[::-1], fillvalue=1)][::-1]
diff --git a/maraboupy/test/test_equation.py b/maraboupy/test/test_equation.py
index 7514c5ad39..46196ded5c 100644
--- a/maraboupy/test/test_equation.py
+++ b/maraboupy/test/test_equation.py
@@ -127,4 +127,3 @@ def load_network():
     network.setUpperBound(inputVars[1], 10.0)
 
     return network
-
diff --git a/maraboupy/test/test_network.py b/maraboupy/test/test_network.py
index a127c99466..9644b40df3 100644
--- a/maraboupy/test/test_network.py
+++ b/maraboupy/test/test_network.py
@@ -123,15 +123,15 @@ def test_concat():
     filename = "concat/concat_axis_0.onnx"
     network = loadNetworkInONNX(filename)
 
-    # Y = concat(X1, X2) + X3
-    assert network.shapeMap['X1'] == [2, 2, 2]
-    assert network.shapeMap['X2'] == [2, 2, 2]
-    assert network.shapeMap['X3'] == [4, 2, 2]
-    assert network.shapeMap['Y'] == [4, 2, 2]
-
     inputVars = network.inputVars
     outputVars = network.outputVars[0]
 
+    # Y = concat(X1, X2) + X3
+    assert inputVars[0].shape == (2, 2, 2)
+    assert inputVars[1].shape == (2, 2, 2)
+    assert inputVars[2].shape == (4, 2, 2)
+    assert outputVars.shape == (4, 2, 2)
+
     # set bounds for X1 and X2
     num = 1
     for i in range(len(inputVars[0])):
@@ -172,15 +172,15 @@ def test_concat():
     filename = "concat/concat_axis_1.onnx"
     network = loadNetworkInONNX(filename)
 
-    # Y = concat(X1, X2) + X3
-    assert network.shapeMap['X1'] == [2, 2, 2]
-    assert network.shapeMap['X2'] == [2, 2, 2]
-    assert network.shapeMap['X3'] == [2, 4, 2]
-    assert network.shapeMap['Y'] == [2, 4, 2]
-
     inputVars = network.inputVars
     outputVars = network.outputVars[0]
 
+    # Y = concat(X1, X2) + X3
+    assert inputVars[0].shape == (2, 2, 2)
+    assert inputVars[1].shape == (2, 2, 2)
+    assert inputVars[2].shape == (2, 4, 2)
+    assert outputVars.shape == (2, 4, 2)
+
     # set bounds for X1 and X2
     num = 1
     for i in range(len(inputVars[0])):
@@ -221,15 +221,15 @@ def test_concat():
     filename = "concat/concat_axis_2.onnx"
     network = loadNetworkInONNX(filename)
 
-    # Y = concat(X1, X2) + X3
-    assert network.shapeMap['X1'] == [2, 2, 2]
-    assert network.shapeMap['X2'] == [2, 2, 2]
-    assert network.shapeMap['X3'] == [2, 2, 4]
-    assert network.shapeMap['Y'] == [2, 2, 4]
-
     inputVars = network.inputVars
     outputVars = network.outputVars[0]
 
+    # Y = concat(X1, X2) + X3
+    assert inputVars[0].shape == (2, 2, 2)
+    assert inputVars[1].shape == (2, 2, 2)
+    assert inputVars[2].shape == (2, 2, 4)
+    assert outputVars.shape == (2, 2, 4)
+
     # set bounds for X1 and X2
     num = 1
     for i in range(len(inputVars[0])):
@@ -279,11 +279,12 @@ def test_split():
     # output1
     network = loadNetworkInONNX(filename, outputName='Y1')
-    assert network.shapeMap['X'] == [6]
-    assert network.shapeMap['Y1'] == (2,)
     inputVars = network.inputVars[0]
     outputVars = 
network.outputVars[0] + assert inputVars.shape == (6,) + assert outputVars.shape == (2,) + for i in range(len(inputVars)): network.setLowerBound(inputVars[i], i + 1) network.setUpperBound(inputVars[i], i + 1) @@ -293,11 +294,12 @@ def test_split(): # output2 network = loadNetworkInONNX(filename, outputName='Y2') - assert network.shapeMap['X'] == [6] - assert network.shapeMap['Y2'] == (2,) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (6,) + assert outputVars.shape == (2,) + for i in range(len(inputVars)): network.setLowerBound(inputVars[i], i + 1) network.setUpperBound(inputVars[i], i + 1) @@ -307,11 +309,12 @@ def test_split(): # output3 network = loadNetworkInONNX(filename, outputName='Y3') - assert network.shapeMap['X'] == [6] - assert network.shapeMap['Y3'] == (2,) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (6,) + assert outputVars.shape == (2,) + for i in range(len(inputVars)): network.setLowerBound(inputVars[i], i + 1) network.setUpperBound(inputVars[i], i + 1) @@ -320,17 +323,18 @@ def test_split(): assert abs(vals[outputVars[1]] - 18.0) < TOL # input: 1d - # split: [2, 4] (array) + # split: (2, 4) (array) # axis: 0 filename = 'split/split_1d_split-2-4_axis-0.onnx' # output1 network = loadNetworkInONNX(filename, outputName='Y1') - assert network.shapeMap['X'] == [6] - assert network.shapeMap['Y1'] == (2,) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (6,) + assert outputVars.shape == (2,) + for i in range(len(inputVars)): network.setLowerBound(inputVars[i], i + 1) network.setUpperBound(inputVars[i], i + 1) @@ -340,11 +344,12 @@ def test_split(): # output2 network = loadNetworkInONNX(filename, outputName='Y2') - assert network.shapeMap['X'] == [6] - assert network.shapeMap['Y2'] == (4,) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (6,) + assert outputVars.shape == (4,) + for i in range(len(inputVars)): network.setLowerBound(inputVars[i], i + 1) network.setUpperBound(inputVars[i], i + 1) @@ -361,11 +366,12 @@ def test_split(): # output1 network = loadNetworkInONNX(filename, outputName='Y1') - assert network.shapeMap['X'] == [2, 6] - assert network.shapeMap['Y1'] == (2, 3) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (2, 6) + assert outputVars.shape == (2, 3) + for i in range(len(inputVars)): for j in range(len(inputVars[i])): network.setLowerBound(inputVars[i][j], (len(inputVars[i])) * i + j + 1) @@ -380,11 +386,12 @@ def test_split(): # output2 network = loadNetworkInONNX(filename, outputName='Y2') - assert network.shapeMap['X'] == [2, 6] - assert network.shapeMap['Y2'] == (2, 3) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (2, 6) + assert outputVars.shape == (2, 3) + for i in range(len(inputVars)): for j in range(len(inputVars[i])): network.setLowerBound(inputVars[i][j], (len(inputVars[i])) * i + j + 1) @@ -404,11 +411,12 @@ def test_split(): # output1 network = loadNetworkInONNX(filename, outputName='Y1') - assert network.shapeMap['X'] == [2, 6] - assert network.shapeMap['Y1'] == (2, 2) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (2, 6) + assert outputVars.shape == (2, 2) + for i in range(len(inputVars)): for j in range(len(inputVars[i])): network.setLowerBound(inputVars[i][j], (len(inputVars[i])) * i + j + 1) @@ -421,11 +429,12 
@@ def test_split(): # output2 network = loadNetworkInONNX(filename, outputName='Y2') - assert network.shapeMap['X'] == [2, 6] - assert network.shapeMap['Y2'] == (2, 4) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (2, 6) + assert outputVars.shape == (2, 4) + for i in range(len(inputVars)): for j in range(len(inputVars[i])): network.setLowerBound(inputVars[i][j], (len(inputVars[i])) * i + j + 1) @@ -448,11 +457,12 @@ def test_split(): # output1 network = loadNetworkInONNX(filename, outputName='Y1') - assert network.shapeMap['X'] == [1, 1, 1, 1, 85] - assert network.shapeMap['Y1'] == (1, 1, 1, 1, 2) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (1, 1, 1, 1, 85) + assert outputVars.shape == (1, 1, 1, 1, 2) + for i in range(85): network.setLowerBound(inputVars[0][0][0][0][i], i + 1) network.setUpperBound(inputVars[0][0][0][0][i], i + 1) @@ -462,11 +472,12 @@ def test_split(): # output2 network = loadNetworkInONNX(filename, outputName='Y2') - assert network.shapeMap['X'] == [1, 1, 1, 1, 85] - assert network.shapeMap['Y2'] == (1, 1, 1, 1, 2) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (1, 1, 1, 1, 85) + assert outputVars.shape == (1, 1, 1, 1, 2) + for i in range(85): network.setLowerBound(inputVars[0][0][0][0][i], i + 1) network.setUpperBound(inputVars[0][0][0][0][i], i + 1) @@ -476,11 +487,12 @@ def test_split(): # output3 network = loadNetworkInONNX(filename, outputName='Y3') - assert network.shapeMap['X'] == [1, 1, 1, 1, 85] - assert network.shapeMap['Y3'] == (1, 1, 1, 1, 81) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (1, 1, 1, 1, 85) + assert outputVars.shape == (1, 1, 1, 1, 81) + for i in range(85): network.setLowerBound(inputVars[0][0][0][0][i], i + 1) network.setUpperBound(inputVars[0][0][0][0][i], i + 1) @@ -497,11 +509,12 @@ def test_split(): # output1 network = loadNetworkInONNX(filename, outputName='Y') - assert network.shapeMap['X'] == [1, 1, 1, 1, 85] - assert network.shapeMap['Y'] == (1, 1, 1, 1, 85) inputVars = network.inputVars[0] outputVars = network.outputVars[0] + assert inputVars.shape == (1, 1, 1, 1, 85) + assert outputVars.shape == (1, 1, 1, 1, 85) + for i in range(85): network.setLowerBound(inputVars[0][0][0][0][i], i + 1) network.setUpperBound(inputVars[0][0][0][0][i], i + 1) @@ -517,10 +530,12 @@ def test_resize(): filename = 'resize/resize_4dims.onnx' network = loadNetworkInONNX(filename, outputName='Y') - assert network.shapeMap['X'] == [1, 3, 2, 2] - assert network.shapeMap['Y'] == (1, 3, 4, 4) inputVars = network.inputVars[0] + outputVars = network.outputVars[0] + assert inputVars.shape == (1, 3, 2, 2) + assert outputVars.shape == (1, 3, 4, 4) + inputValues = np.array( [[[[ 1., 2.], [ 3., 4.]], @@ -542,7 +557,6 @@ def test_resize(): # solve _, vals, _ = network.solve(options = options) - outputVars = network.outputVars[0] expectedOutputValues = np.array( [[[[ 1., 1., 2., 2.], [ 1., 1., 2., 2.], diff --git a/maraboupy/test/test_nnet_extensions.py b/maraboupy/test/test_nnet_extensions.py index ac23465b8b..dc60da799e 100644 --- a/maraboupy/test/test_nnet_extensions.py +++ b/maraboupy/test/test_nnet_extensions.py @@ -188,7 +188,7 @@ def test_bound_getters(): """ nnet_object = Marabou.read_nnet(filename=NETWORK_FILENAME, normalize=False) - ipq = nnet_object.getMarabouQuery() + ipq = nnet_object.getInputQuery() num_input_vars = ipq.getNumInputVariables() assert 
num_input_vars == nnet_object.inputSize diff --git a/maraboupy/test/test_onnx.py b/maraboupy/test/test_onnx.py index 4033033c32..d04c21fd12 100644 --- a/maraboupy/test/test_onnx.py +++ b/maraboupy/test/test_onnx.py @@ -204,15 +204,14 @@ def test_multiOutput(): filename = "conv_mp1_intermediateOutput.onnx" evaluateFile(filename) -def test_shallow_clear(): +def test_preserve_existing_constraints_clear(): filename = "tanh_test.onnx" filename = os.path.join(os.path.dirname(__file__), NETWORK_FOLDER, filename) network = Marabou.read_onnx(filename, reindexOutputVars=False) numVar1 = network.numVars numEq1 = len(network.equList) numSigmoid1 = len(network.sigmoidList) - network.shallowClear() - network.readONNX(filename, None, None, reindexOutputVars=False) + network.readONNX(filename, None, None, reindexOutputVars=False, preserveExistingConstraints=True) numVar2 = network.numVars numEq2 = len(network.equList) numSigmoid2 = len(network.sigmoidList) @@ -389,4 +388,4 @@ def evaluateIntermediateLayers(filename, inputNames = None, outputNames = None, marabouEval_end = endNetwork.evaluateWithMarabou([marabouEval_start], options = OPT, filename = "") assert len(marabouEval_end) == len(onnxEval_full) for i in range(len(marabouEval_end)): - assert max(abs(marabouEval_end[i].flatten() - onnxEval_full[i].flatten())) < TOL + assert max(abs(marabouEval_end[i].flatten() - onnxEval_full[i].flatten())) < TOL \ No newline at end of file diff --git a/maraboupy/test/test_query.py b/maraboupy/test/test_query.py index 590932c537..fc36b09f6c 100644 --- a/maraboupy/test/test_query.py +++ b/maraboupy/test/test_query.py @@ -109,7 +109,7 @@ def test_get_marabou_query(tmpdir): Marabou from the same network file (relies on saveQuery, compares the output of saveQuery on the two queries). ''' network = load_acas_network() - ipq1 = network.getMarabouQuery() + ipq1 = network.getInputQuery() ipq2 = MarabouCore.InputQuery() network_filename = os.path.join(os.path.dirname(__file__), ACAS_FILE) diff --git a/resources/runMarabou.py b/resources/runMarabou.py index 3c78e8d5b8..57e46fb866 100755 --- a/resources/runMarabou.py +++ b/resources/runMarabou.py @@ -66,23 +66,23 @@ def createQuery(args): return None, None if args.prop != None: - query = network.getMarabouQuery() + query = network.getInputQuery() MarabouCore.loadProperty(query, args.prop) return query, network if args.dataset == 'mnist': encode_mnist_linf(network, args.index, args.epsilon, args.target_label) - return network.getMarabouQuery(), network + return network.getInputQuery(), network elif args.dataset == 'cifar10': encode_cifar10_linf(network, args.index, args.epsilon, args.target_label) - return network.getMarabouQuery(), network + return network.getInputQuery(), network else: """ ENCODE YOUR CUSTOMIZED PROPERTY HERE! """ print("No property encoded!") - return network.getMarabouQuery(), network + return network.getInputQuery(), network def encode_mnist_linf(network, index, epsilon, target_label): from tensorflow.keras.datasets import mnist