diff --git a/src/engine/ReluConstraint.cpp b/src/engine/ReluConstraint.cpp
index e4b52b1d2..1f7b71f71 100644
--- a/src/engine/ReluConstraint.cpp
+++ b/src/engine/ReluConstraint.cpp
@@ -40,8 +40,6 @@ ReluConstraint::ReluConstraint( unsigned b, unsigned f )
     , _direction( PHASE_NOT_FIXED )
     , _haveEliminatedVariables( false )
     , _tighteningRow( NULL )
-    , _previousBias( 0.0 )
-    , _hasPreviousBias( false )
 {
 }
 
@@ -1042,18 +1040,7 @@ double ReluConstraint::computeBaBsr() const
     if ( !_networkLevelReasoner )
         throw MarabouError( MarabouError::NETWORK_LEVEL_REASONER_NOT_AVAILABLE );
 
-    double biasTerm;
-
-    if ( _hasPreviousBias )
-    {
-        biasTerm = _previousBias;
-    }
-    else
-    {
-        biasTerm = _networkLevelReasoner->getPreviousBias( this );
-        _hasPreviousBias = true;
-        _previousBias = biasTerm;
-    }
+    double biasTerm = _networkLevelReasoner->getPreviousBias( this );
 
     // get upper and lower bounds
     double ub = getUpperBound( _b );
diff --git a/src/engine/ReluConstraint.h b/src/engine/ReluConstraint.h
index 719448a45..5821813e3 100644
--- a/src/engine/ReluConstraint.h
+++ b/src/engine/ReluConstraint.h
@@ -305,12 +305,6 @@ class ReluConstraint : public PiecewiseLinearConstraint
       Assign a variable as an aux variable by the tableau, related to some existing aux variable.
     */
     void addTableauAuxVar( unsigned tableauAuxVar, unsigned constraintAuxVar ) override;
-
-    /*
-      cached biases for the source layers for ReLU neurons.
-    */
-    mutable double _previousBias;
-    mutable bool _hasPreviousBias;
 };
 
 #endif // __ReluConstraint_h__
diff --git a/src/nlr/NetworkLevelReasoner.cpp b/src/nlr/NetworkLevelReasoner.cpp
index 477c30e3d..8390a0190 100644
--- a/src/nlr/NetworkLevelReasoner.cpp
+++ b/src/nlr/NetworkLevelReasoner.cpp
@@ -596,69 +596,78 @@ void NetworkLevelReasoner::generateLinearExpressionForWeightedSumLayer(
 }
 
 /*
-  Get ReLU bias given a ReLU constraint
-  Used for BaBSR Heuristic
+  Initialize and fill ReLU Constraint to previous bias map
+  for BaBSR Heuristic
 */
-double NetworkLevelReasoner::getPreviousBias( const ReluConstraint *reluConstraint ) const
+void NetworkLevelReasoner::initializePreviousBiasMap()
 {
-    // Find the index of the reluConstraint in the _constraintsInTopologicalOrder list
-    unsigned constraintIndex = 0;
-    bool foundConstraint = false;
-
-    for ( const auto &constraint : _constraintsInTopologicalOrder )
-    {
-        if ( constraint == reluConstraint )
-        {
-            foundConstraint = true;
-            break;
-        }
-        ++constraintIndex;
-    }
+    // Clear the previous bias map
+    _previousBiases.clear();
 
-    // If the constraint was not found, throw an error
-    if ( !foundConstraint )
-    {
-        throw NLRError( NLRError::RELU_NOT_FOUND,
-                        "ReluConstraint not found in topological order." );
-    }
-
-    // Traverse through layers and find the corresponding ReLU layer
-    const NLR::Layer *reluLayer = nullptr;
+    // Track accumulated ReLU neurons across layers
     unsigned accumulatedNeurons = 0;
 
+    // Iterate through layers to find ReLU layers and their sources
     for ( const auto &layerPair : _layerIndexToLayer )
     {
         const NLR::Layer *layer = layerPair.second;
 
-        // Only process ReLU layers
         if ( layer->getLayerType() == Layer::RELU )
         {
-            unsigned layerSize = layer->getSize(); // Cache size of the current layer
+            // Get source layer info
+            const auto &sourceLayers = layer->getSourceLayers();
+            unsigned sourceLayerIndex = sourceLayers.begin()->first;
+            const NLR::Layer *sourceLayer = getLayer( sourceLayerIndex );
+
+            // Match ReLU constraints to their source layer biases
+            unsigned layerSize = layer->getSize();
+
+            // Iterate through constraints
+            auto constraintIterator = _constraintsInTopologicalOrder.begin();
+            for ( unsigned currentIndex = 0;
+                  currentIndex < accumulatedNeurons &&
+                  constraintIterator != _constraintsInTopologicalOrder.end();
+                  ++currentIndex, ++constraintIterator )
+            {
+            }
 
-            // Check if the constraint belongs to this layer (based on neuron counts)
-            if ( constraintIndex < accumulatedNeurons + layerSize )
+            // Now at correct starting position
+            for ( unsigned i = 0;
+                  i < layerSize && constraintIterator != _constraintsInTopologicalOrder.end();
+                  ++i, ++constraintIterator )
             {
-                // This is the ReLU layer corresponding to the constraint
-                reluLayer = layer;
-                break;
+                if ( auto reluConstraint =
+                         dynamic_cast<const ReluConstraint *>( *constraintIterator ) )
+                {
+                    // Store bias in map
+                    _previousBiases[reluConstraint] = sourceLayer->getBias( i );
+                }
             }
-            accumulatedNeurons += layerSize; // Update only if it's a ReLU layer
+
+            accumulatedNeurons += layerSize;
         }
     }
+}
 
-    // If we couldn't find the ReLU layer, throw an error
-    if ( !reluLayer )
-        throw NLRError( NLRError::LAYER_TYPE_NOT_SUPPORTED, "Expected a ReLU layer." );
-
-    // Find the source layer feeding into this ReLU layer
-    const auto &sourceLayers = reluLayer->getSourceLayers();
-    unsigned sourceLayerIndex = sourceLayers.begin()->first; // Cache source layer index
+/*
+  Get previous layer bias of a ReLU neuron
+  for BaBSR Heuristic
+*/
+double NetworkLevelReasoner::getPreviousBias( const ReluConstraint *reluConstraint ) const
+{
+    // Initialize map if empty
+    if ( _previousBiases.empty() )
+    {
+        const_cast<NetworkLevelReasoner *>( this )->initializePreviousBiasMap();
+    }
 
-    const NLR::Layer *sourceLayer = getLayer( sourceLayerIndex );
+    // Look up pre-computed bias
+    if ( !_previousBiases.exists( reluConstraint ) )
+    {
+        throw NLRError( NLRError::RELU_NOT_FOUND, "ReluConstraint not found in bias map." );
+    }
 
-    // Retrieve the bias for the corresponding neuron in the source layer
-    unsigned reluNeuron = constraintIndex - accumulatedNeurons;
-    return sourceLayer->getBias( reluNeuron );
+    return _previousBiases[reluConstraint];
 }
 
 unsigned
diff --git a/src/nlr/NetworkLevelReasoner.h b/src/nlr/NetworkLevelReasoner.h
index cc5d005d9..2660795be 100644
--- a/src/nlr/NetworkLevelReasoner.h
+++ b/src/nlr/NetworkLevelReasoner.h
@@ -172,6 +172,10 @@ class NetworkLevelReasoner : public LayerOwner
     */
    void generateQuery( Query &query );
 
+    /*
+      Given a ReLU Constraint, get the previous layer bias
+      for the BaBSR Heuristic
+    */
     double getPreviousBias( const ReluConstraint *reluConstraint ) const;
 
     /*
@@ -238,6 +242,13 @@ class NetworkLevelReasoner : public LayerOwner
                                 unsigned outputDimension );
     void reduceLayerIndex( unsigned layer, unsigned startIndex );
 
+    /*
+      Store previous biases for each ReLU neuron in a map for getPreviousBias()
+      and BaBSR heuristic
+    */
+    Map<const ReluConstraint *, double> _previousBiases;
+    void initializePreviousBiasMap();
+
     /*
       If the NLR is manipulated manually in order to generate a new input query,
       this method can be used to assign variable indices
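
Reviewer note: the patch replaces ReluConstraint's per-object cache (_previousBias / _hasPreviousBias) with one lazily built map owned by the NetworkLevelReasoner, so each BaBSR query becomes a hash lookup instead of a linear scan over _constraintsInTopologicalOrder. The standalone sketch below distills that pattern; it is illustrative only: ReluNeuron and BiasCache are hypothetical stand-ins rather than Marabou types, and std::unordered_map substitutes for Marabou's Map container.

// Minimal sketch of the lazy bias-cache pattern in this patch, under stated
// assumptions: ReluNeuron and BiasCache are illustrative stand-ins, and
// std::unordered_map substitutes for Marabou's Map.
#include <cstdio>
#include <stdexcept>
#include <unordered_map>
#include <utility>
#include <vector>

struct ReluNeuron
{
    double sourceBias; // bias of the weighted-sum neuron feeding this ReLU
};

class BiasCache
{
public:
    explicit BiasCache( std::vector<ReluNeuron> neurons )
        : _neurons( std::move( neurons ) )
    {
    }

    const std::vector<ReluNeuron> &neurons() const
    {
        return _neurons;
    }

    // Mirrors getPreviousBias(): build the map on first use, then answer
    // every later query with a hash lookup instead of a linear scan.
    double getPreviousBias( const ReluNeuron *neuron ) const
    {
        if ( _previousBiases.empty() )
            initializePreviousBiasMap();

        auto it = _previousBiases.find( neuron );
        if ( it == _previousBiases.end() )
            throw std::runtime_error( "Neuron not found in bias map." );
        return it->second;
    }

private:
    // Mirrors initializePreviousBiasMap(): one pass over all neurons.
    void initializePreviousBiasMap() const
    {
        for ( const ReluNeuron &neuron : _neurons )
            _previousBiases[&neuron] = neuron.sourceBias;
    }

    std::vector<ReluNeuron> _neurons;

    // `mutable` plays the role of the const_cast in the patch: the cache is
    // an implementation detail, so a const query may still populate it.
    mutable std::unordered_map<const ReluNeuron *, double> _previousBiases;
};

int main()
{
    BiasCache cache( { { 0.5 }, { -1.25 } } );

    // First call builds the map; the second is a pure lookup.
    std::printf( "%g\n", cache.getPreviousBias( &cache.neurons()[0] ) );
    std::printf( "%g\n", cache.getPreviousBias( &cache.neurons()[1] ) );
    return 0;
}

One design difference worth noting: the sketch uses a mutable member where the patch uses const_cast; the observable behavior is the same. In both versions the cache keys on constraint pointers and is built only once, so it would go stale if constraints were added or eliminated after the first query.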