1 #include <cstring>
2 #include <cuda_runtime.h>
4 #include "gtest/gtest.h"
5 #include "caffeine/blob.hpp"
6 #include "caffeine/common.hpp"
7 #include "caffeine/filler.hpp"
8 #include "caffeine/vision_layers.hpp"
9 #include "caffeine/test/test_gradient_check_util.hpp"
11 namespace caffeine {
13 template <typename Dtype>
14 class NeuronLayerTest : public ::testing::Test {
15 protected:
16 NeuronLayerTest()
17 : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
18 blob_top_(new Blob<Dtype>()) {
19 // fill the values
20 FillerParameter filler_param;
21 GaussianFiller<Dtype> filler(filler_param);
22 filler.Fill(this->blob_bottom_);
23 blob_bottom_vec_.push_back(blob_bottom_);
24 blob_top_vec_.push_back(blob_top_);
25 };
26 virtual ~NeuronLayerTest() { delete blob_bottom_; delete blob_top_; }
27 Blob<Dtype>* const blob_bottom_;
28 Blob<Dtype>* const blob_top_;
29 vector<Blob<Dtype>*> blob_bottom_vec_;
30 vector<Blob<Dtype>*> blob_top_vec_;
31 };
// Instantiate every test in the fixture for both float and double.
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(NeuronLayerTest, Dtypes);
36 TYPED_TEST(NeuronLayerTest, TestReLUCPU) {
37 LayerParameter layer_param;
38 Caffeine::set_mode(Caffeine::CPU);
39 ReLULayer<TypeParam> layer(layer_param);
40 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
41 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
42 // Now, check values
43 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
44 const TypeParam* top_data = this->blob_top_->cpu_data();
45 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
46 EXPECT_GE(top_data[i], 0.);
47 EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
48 }
49 }
52 TYPED_TEST(NeuronLayerTest, TestReLUGradientCPU) {
53 LayerParameter layer_param;
54 Caffeine::set_mode(Caffeine::CPU);
55 ReLULayer<TypeParam> layer(layer_param);
56 GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
57 checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
58 }
61 TYPED_TEST(NeuronLayerTest, TestReLUGPU) {
62 LayerParameter layer_param;
63 Caffeine::set_mode(Caffeine::GPU);
64 ReLULayer<TypeParam> layer(layer_param);
65 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
66 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
67 // Now, check values
68 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
69 const TypeParam* top_data = this->blob_top_->cpu_data();
70 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
71 EXPECT_GE(top_data[i], 0.);
72 EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
73 }
74 }
77 TYPED_TEST(NeuronLayerTest, TestReLUGradientGPU) {
78 LayerParameter layer_param;
79 Caffeine::set_mode(Caffeine::GPU);
80 ReLULayer<TypeParam> layer(layer_param);
81 GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
82 checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
83 }
86 TYPED_TEST(NeuronLayerTest, TestDropoutCPU) {
87 LayerParameter layer_param;
88 Caffeine::set_mode(Caffeine::CPU);
89 Caffeine::set_phase(Caffeine::TRAIN);
90 DropoutLayer<TypeParam> layer(layer_param);
91 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
92 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
93 // Now, check values
94 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
95 const TypeParam* top_data = this->blob_top_->cpu_data();
96 float scale = 1. / (1. - layer_param.dropout_ratio());
97 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
98 if (top_data[i] != 0) {
99 EXPECT_EQ(top_data[i], bottom_data[i] * scale);
100 }
101 }
102 }
105 TYPED_TEST(NeuronLayerTest, TestDropoutGradientCPU) {
106 LayerParameter layer_param;
107 Caffeine::set_mode(Caffeine::CPU);
108 DropoutLayer<TypeParam> layer(layer_param);
109 GradientChecker<TypeParam> checker(1e-2, 1e-3);
110 checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
111 }
114 TYPED_TEST(NeuronLayerTest, TestDropoutCPUTestPhase) {
115 LayerParameter layer_param;
116 Caffeine::set_mode(Caffeine::CPU);
117 Caffeine::set_phase(Caffeine::TEST);
118 DropoutLayer<TypeParam> layer(layer_param);
119 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
120 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
121 // Now, check values
122 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
123 const TypeParam* top_data = this->blob_top_->cpu_data();
124 float scale = 1. / (1. - layer_param.dropout_ratio());
125 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
126 if (top_data[i] != 0) {
127 EXPECT_EQ(top_data[i], bottom_data[i]);
128 }
129 }
130 }
133 TYPED_TEST(NeuronLayerTest, TestDropoutGPU) {
134 LayerParameter layer_param;
135 Caffeine::set_mode(Caffeine::GPU);
136 Caffeine::set_phase(Caffeine::TRAIN);
137 DropoutLayer<TypeParam> layer(layer_param);
138 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
139 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
140 // Now, check values
141 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
142 const TypeParam* top_data = this->blob_top_->cpu_data();
143 float scale = 1. / (1. - layer_param.dropout_ratio());
144 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
145 if (top_data[i] != 0) {
146 EXPECT_EQ(top_data[i], bottom_data[i] * scale);
147 }
148 }
149 }
152 TYPED_TEST(NeuronLayerTest, TestDropoutGradientGPU) {
153 LayerParameter layer_param;
154 Caffeine::set_mode(Caffeine::GPU);
155 DropoutLayer<TypeParam> layer(layer_param);
156 GradientChecker<TypeParam> checker(1e-2, 1e-3);
157 checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
158 }
161 TYPED_TEST(NeuronLayerTest, TestDropoutGPUTestPhase) {
162 LayerParameter layer_param;
163 Caffeine::set_mode(Caffeine::GPU);
164 Caffeine::set_phase(Caffeine::TEST);
165 DropoutLayer<TypeParam> layer(layer_param);
166 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
167 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
168 // Now, check values
169 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
170 const TypeParam* top_data = this->blob_top_->cpu_data();
171 float scale = 1. / (1. - layer_param.dropout_ratio());
172 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
173 if (top_data[i] != 0) {
174 EXPECT_EQ(top_data[i], bottom_data[i]);
175 }
176 }
177 }
179 }