Do not add dropout layer at all if None
ltriess committed Jun 1, 2021
1 parent 7ea5d07 commit 7fcaf2a
Showing 1 changed file with 13 additions and 3 deletions.
16 changes: 13 additions & 3 deletions pointnet2/model.py
@@ -37,6 +37,7 @@ class Classifier(tf.keras.models.Model):
         Paper default is [512, 256, 40].
     dropout_rate : float or List[float]
         The dropout ratio applied after each dense layer. Paper default is 0.5~0.6.
+        If None, no dropout will be used.
     feature_norm : str or List[str]
         The feature normalization to use. Can be `batch` for batch normalization or
         `layer` for layer normalization. If None, no normalization is applied.
@@ -74,7 +75,8 @@ def __init__(
             else:
                 pass
             self.model.add(tf.keras.layers.LeakyReLU())
-            self.model.add(tf.keras.layers.Dropout(rate=dropout_rate[i]))
+            if dropout_rate[i] is not None:
+                self.model.add(tf.keras.layers.Dropout(rate=dropout_rate[i]))
         self.model.add(tf.keras.layers.Dense(units[-1]))

     def call(
@@ -105,6 +107,9 @@ class SegmentationModel(tf.keras.models.Model):
         amount of layers than the feature extractor.
     num_classes: int
         The number of classes to make predictions for.
+    dropout_rate : float or List[float]
+        The dropout ratio applied in each block. Paper default is 0.5.
+        If None, no dropout will be used.
     feature_norm : str
         The feature normalization to use. Can be `batch` for batch normalization
         or `layer` for layer normalization. If None, no normalization is applied.
@@ -115,7 +120,11 @@ class SegmentationModel(tf.keras.models.Model):
     """

     def __init__(
-        self, fp_units: List[List[int]], num_classes: int, feature_norm: str = None
+        self,
+        fp_units: List[List[int]],
+        num_classes: int,
+        dropout_rate: float = 0.5,
+        feature_norm: str = None,
     ):
         super().__init__()

@@ -138,7 +147,8 @@ def __init__(
             else:
                 pass
             self.head.add(tf.keras.layers.LeakyReLU())
-            self.head.add(tf.keras.layers.Dropout(rate=0.5))
+            if dropout_rate is not None:
+                self.head.add(tf.keras.layers.Dropout(rate=dropout_rate))
         self.head.add(
             tf.keras.layers.Conv1D(num_classes, kernel_size=1, padding="valid")
         )
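
For context, a minimal usage sketch of the new behavior. Only SegmentationModel's updated signature is confirmed by the diff above; the import path and the fp_units/num_classes values are assumptions for illustration.

# Minimal sketch of the new dropout handling, assuming this import path
# and hypothetical fp_units/num_classes values.
from pointnet2.model import SegmentationModel

# Default: each block in the segmentation head still ends with Dropout(rate=0.5).
model = SegmentationModel(
    fp_units=[[256, 256], [256, 128], [128, 128, 128]],  # hypothetical values
    num_classes=40,
    dropout_rate=0.5,
)

# New behavior: with dropout_rate=None the Dropout layers are omitted
# entirely, rather than added with an invalid rate.
model_no_dropout = SegmentationModel(
    fp_units=[[256, 256], [256, 128], [128, 128, 128]],
    num_classes=40,
    dropout_rate=None,
)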
