
Commit

Refactor previous-bias handling to store the biases upfront in the NLR
liamjdavis committed Jan 12, 2025
1 parent abf1db1 commit 6e6d405
Showing 4 changed files with 65 additions and 64 deletions.
src/engine/ReluConstraint.cpp (15 changes: 1 addition & 14 deletions)
@@ -40,8 +40,6 @@ ReluConstraint::ReluConstraint( unsigned b, unsigned f )
, _direction( PHASE_NOT_FIXED )
, _haveEliminatedVariables( false )
, _tighteningRow( NULL )
, _previousBias( 0.0 )
, _hasPreviousBias( false )
{
}

@@ -1042,18 +1040,7 @@ double ReluConstraint::computeBaBsr() const
if ( !_networkLevelReasoner )
throw MarabouError( MarabouError::NETWORK_LEVEL_REASONER_NOT_AVAILABLE );

double biasTerm;

if ( _hasPreviousBias )
{
biasTerm = _previousBias;
}
else
{
biasTerm = _networkLevelReasoner->getPreviousBias( this );
_hasPreviousBias = true;
_previousBias = biasTerm;
}
double biasTerm = _networkLevelReasoner->getPreviousBias( this );

// get upper and lower bounds
double ub = getUpperBound( _b );
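
Because the copied diff loses its +/- markers, removed and added lines in the hunk above run together. Read against this file's stated counts (1 addition, 14 deletions), the change to ReluConstraint::computeBaBsr() condenses to the following; the removed branch is kept as comments for contrast.

// Condensed reading of the computeBaBsr() hunk above.
// Removed -- the per-constraint cache:
//     double biasTerm;
//     if ( _hasPreviousBias )
//         biasTerm = _previousBias;
//     else
//     {
//         biasTerm = _networkLevelReasoner->getPreviousBias( this );
//         _hasPreviousBias = true;
//         _previousBias = biasTerm;
//     }
//
// Added -- a single call; the NLR now owns the caching:
double biasTerm = _networkLevelReasoner->getPreviousBias( this );
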
src/engine/ReluConstraint.h (6 changes: 0 additions & 6 deletions)
@@ -305,12 +305,6 @@ class ReluConstraint : public PiecewiseLinearConstraint
Assign a variable as an aux variable by the tableau, related to some existing aux variable.
*/
void addTableauAuxVar( unsigned tableauAuxVar, unsigned constraintAuxVar ) override;

/*
cached biases for the source layers for ReLU neurons.
*/
mutable double _previousBias;
mutable bool _hasPreviousBias;
};

#endif // __ReluConstraint_h__
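
For contrast, here is a minimal, self-contained sketch of the pattern this commit removes: each constraint lazily fetched and cached its own previous bias, which is why the two members deleted above were mutable (the scoring method is const). The names Oracle, Constraint, and expensiveBiasLookup are generic stand-ins for illustration, not Marabou's API.

#include <iostream>

struct Oracle
{
    // Stands in for the NLR walk that finds the source-layer bias.
    double expensiveBiasLookup() const
    {
        return 0.25;
    }
};

class Constraint
{
public:
    explicit Constraint( const Oracle *oracle )
        : _oracle( oracle )
        , _previousBias( 0.0 )
        , _hasPreviousBias( false )
    {
    }

    // The scoring method is const, so the cache members must be mutable.
    double score() const
    {
        if ( !_hasPreviousBias )
        {
            _previousBias = _oracle->expensiveBiasLookup();
            _hasPreviousBias = true;
        }
        return _previousBias; // the real method folds this into the BaBSR score
    }

private:
    const Oracle *_oracle;
    mutable double _previousBias;  // analogue of the member removed above
    mutable bool _hasPreviousBias; // analogue of the member removed above
};

int main()
{
    Oracle oracle;
    Constraint c( &oracle );
    std::cout << c.score() << "\n"; // first call fills the per-object cache
    std::cout << c.score() << "\n"; // later calls reuse it
    return 0;
}

With many ReLU constraints, every object repeats the same lookup and caching machinery; the commit moves both the layer walk and the cache into the NetworkLevelReasoner instead.
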
src/nlr/NetworkLevelReasoner.cpp (97 changes: 53 additions & 44 deletions)
@@ -596,69 +596,78 @@ void NetworkLevelReasoner::generateLinearExpressionForWeightedSumLayer(
}

/*
Get ReLU bias given a ReLU constraint
Used for BaBSR Heuristic
Initialize and fill ReLU Constraint to previous bias map
for BaBSR Heuristic
*/
double NetworkLevelReasoner::getPreviousBias( const ReluConstraint *reluConstraint ) const
void NetworkLevelReasoner::initializePreviousBiasMap()
{
// Find the index of the reluConstraint in the _constraintsInTopologicalOrder list
unsigned constraintIndex = 0;
bool foundConstraint = false;

for ( const auto &constraint : _constraintsInTopologicalOrder )
{
if ( constraint == reluConstraint )
{
foundConstraint = true;
break;
}
++constraintIndex;
}
// Clear the previous bias map
_previousBiases.clear();

// If the constraint was not found, throw an error
if ( !foundConstraint )
{
throw NLRError( NLRError::RELU_NOT_FOUND,
"ReluConstraint not found in topological order." );
}

// Traverse through layers and find the corresponding ReLU layer
const NLR::Layer *reluLayer = nullptr;
// Track accumulated ReLU neurons across layers
unsigned accumulatedNeurons = 0;

// Iterate through layers to find ReLU layers and their sources
for ( const auto &layerPair : _layerIndexToLayer )
{
const NLR::Layer *layer = layerPair.second;

// Only process ReLU layers
if ( layer->getLayerType() == Layer::RELU )
{
unsigned layerSize = layer->getSize(); // Cache size of the current layer
// Get source layer info
const auto &sourceLayers = layer->getSourceLayers();
unsigned sourceLayerIndex = sourceLayers.begin()->first;
const NLR::Layer *sourceLayer = getLayer( sourceLayerIndex );

// Match ReLU constraints to their source layer biases
unsigned layerSize = layer->getSize();

// Iterate through constraints
auto constraintIterator = _constraintsInTopologicalOrder.begin();
for ( unsigned currentIndex = 0;
currentIndex < accumulatedNeurons &&
constraintIterator != _constraintsInTopologicalOrder.end();
++currentIndex, ++constraintIterator )
{
}

// Check if the constraint belongs to this layer (based on neuron counts)
if ( constraintIndex < accumulatedNeurons + layerSize )
// Now at correct starting position
for ( unsigned i = 0;
i < layerSize && constraintIterator != _constraintsInTopologicalOrder.end();
++i, ++constraintIterator )
{
// This is the ReLU layer corresponding to the constraint
reluLayer = layer;
break;
if ( auto reluConstraint =
dynamic_cast<const ReluConstraint *>( *constraintIterator ) )
{
// Store bias in map
_previousBiases[reluConstraint] = sourceLayer->getBias( i );
}
}
accumulatedNeurons += layerSize; // Update only if it's a ReLU layer

accumulatedNeurons += layerSize;
}
}
}

// If we couldn't find the ReLU layer, throw an error
if ( !reluLayer )
throw NLRError( NLRError::LAYER_TYPE_NOT_SUPPORTED, "Expected a ReLU layer." );

// Find the source layer feeding into this ReLU layer
const auto &sourceLayers = reluLayer->getSourceLayers();
unsigned sourceLayerIndex = sourceLayers.begin()->first; // Cache source layer index
/*
Get previous layer bias of a ReLU neuron
for BaBSR Heuristic
*/
double NetworkLevelReasoner::getPreviousBias( const ReluConstraint *reluConstraint ) const
{
// Initialize map if empty
if ( _previousBiases.empty() )
{
const_cast<NetworkLevelReasoner *>( this )->initializePreviousBiasMap();
}

const NLR::Layer *sourceLayer = getLayer( sourceLayerIndex );
// Look up pre-computed bias
if ( !_previousBiases.exists( reluConstraint ) )
{
throw NLRError( NLRError::RELU_NOT_FOUND, "ReluConstraint not found in bias map." );
}

// Retrieve the bias for the corresponding neuron in the source layer
unsigned reluNeuron = constraintIndex - accumulatedNeurons;
return sourceLayer->getBias( reluNeuron );
return _previousBiases[reluConstraint];
}

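
As in the first file, the +/- markers are lost above, so lines from the old implementation (the constraint-index search, the layer scan, and the reluNeuron arithmetic) are interleaved with the new map-based code. Distilled, the new scheme is: build a constraint-to-bias map once, then answer getPreviousBias() by lookup, with the map filled lazily from the const accessor via const_cast. The sketch below shows only that structure; it uses std::unordered_map and placeholder names rather than Marabou's Map, Layer, and ReluConstraint types, and the topological layer walk is replaced by a precomputed list of pairs.

#include <stdexcept>
#include <unordered_map>
#include <utility>
#include <vector>

class Constraint
{
};

class Reasoner
{
public:
    explicit Reasoner( std::vector<std::pair<const Constraint *, double>> sourceBiases )
        : _sourceBiases( std::move( sourceBiases ) )
    {
    }

    double getPreviousBias( const Constraint *constraint ) const
    {
        // Lazy initialization from a const accessor, mirroring the
        // const_cast in the diff above.
        if ( _previousBiases.empty() )
            const_cast<Reasoner *>( this )->initializePreviousBiasMap();

        auto it = _previousBiases.find( constraint );
        if ( it == _previousBiases.end() )
            throw std::runtime_error( "Constraint not found in bias map" );
        return it->second;
    }

private:
    void initializePreviousBiasMap()
    {
        _previousBiases.clear();
        // The real code walks ReLU layers in topological order and pairs each
        // ReluConstraint with the bias of its source-layer neuron; here the
        // pairing is supplied up front to keep the sketch small.
        for ( const auto &entry : _sourceBiases )
            _previousBiases[entry.first] = entry.second;
    }

    std::vector<std::pair<const Constraint *, double>> _sourceBiases;
    std::unordered_map<const Constraint *, double> _previousBiases;
};

int main()
{
    Constraint relu;
    Reasoner reasoner( { { &relu, 0.25 } } );
    return reasoner.getPreviousBias( &relu ) == 0.25 ? 0 : 1;
}

After the first call every lookup is constant-time; the previous implementation re-scanned _constraintsInTopologicalOrder and the layer map on each uncached query.
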
src/nlr/NetworkLevelReasoner.h (11 changes: 11 additions & 0 deletions)
@@ -172,6 +172,10 @@ class NetworkLevelReasoner : public LayerOwner
*/
void generateQuery( Query &query );

/*
Given a ReLU Constraint, get the previous layer bias
for the BaBSR Heuristic
*/
double getPreviousBias( const ReluConstraint *reluConstraint ) const;

/*
@@ -238,6 +242,13 @@ class NetworkLevelReasoner : public LayerOwner
unsigned outputDimension );
void reduceLayerIndex( unsigned layer, unsigned startIndex );

/*
Store previous biases for each ReLU neuron in a map for getPreviousBias()
and BaBSR heuristic
*/
Map<const ReluConstraint *, double> _previousBiases;
void initializePreviousBiasMap();

/*
If the NLR is manipulated manually in order to generate a new
input query, this method can be used to assign variable indices
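
One design note on the new members: because _previousBiases is a regular (non-mutable) member, the const getPreviousBias() has to const_cast in order to populate it lazily. A hypothetical alternative, sketched below purely for comparison and not part of this commit, is to make the cache mutable and the initializer const, which removes the cast at the cost of a mutable member.

/*
  Hypothetical variant of the declarations above; not the commit's code.
  A mutable cache lets the const getter fill the map without const_cast.
*/
mutable Map<const ReluConstraint *, double> _previousBiases;
void initializePreviousBiasMap() const;
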
