]> Gitweb @ Texas Instruments - Open Source Git Repositories - git.TI.com/gitweb - jacinto-ai/pytorch-jacinto-ai-devkit.git/blobdiff - modules/pytorch_jacinto_ai/xnn/quantize/quant_train_module.py
torch.nn.ReLU is the recommended activation module. Removed the custom-defined module...
[jacinto-ai/pytorch-jacinto-ai-devkit.git] / modules / pytorch_jacinto_ai / xnn / quantize / quant_train_module.py
index b1a882d07534ff19fcbc606038651f956a97bcbd..d7fbf0386efa99d27a16ffe7916440a6a6d493bf 100644 (file)
@@ -98,7 +98,7 @@ class QuantTrainModule(QuantBaseModule):
                 elif isinstance(m, layers.NoAct):
                     new_m = QuantTrainPAct2(signed=None, bitwidth_weights=self.bitwidth_weights, bitwidth_activations=self.bitwidth_activations,
                                              per_channel_q=self.per_channel_q)
-                elif isinstance(m, (torch.nn.ReLU, torch.nn.ReLU6, layers.ReLUN)):
+                elif isinstance(m, (torch.nn.ReLU, torch.nn.ReLU6)):
                     new_m = QuantTrainPAct2(signed=False, bitwidth_weights=self.bitwidth_weights, bitwidth_activations=self.bitwidth_activations,
                                              per_channel_q=self.per_channel_q)
                 else: