5a28b0056cc24e4c10300089f8fd152b19ceb366
1 #include <cstring>
2 #include <cuda_runtime.h>
4 #include "gtest/gtest.h"
5 #include "caffeine/blob.hpp"
6 #include "caffeine/common.hpp"
7 #include "caffeine/filler.hpp"
8 #include "caffeine/vision_layers.hpp"
9 #include "caffeine/test/test_gradient_check_util.hpp"
12 namespace caffeine {
14 extern cudaDeviceProp CAFFEINE_TEST_CUDA_PROP;
16 template <typename Dtype>
17 class NeuronLayerTest : public ::testing::Test {
18 protected:
19 NeuronLayerTest()
20 : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
21 blob_top_(new Blob<Dtype>()) {
22 // fill the values
23 FillerParameter filler_param;
24 GaussianFiller<Dtype> filler(filler_param);
25 filler.Fill(this->blob_bottom_);
26 blob_bottom_vec_.push_back(blob_bottom_);
27 blob_top_vec_.push_back(blob_top_);
28 };
29 virtual ~NeuronLayerTest() { delete blob_bottom_; delete blob_top_; }
30 Blob<Dtype>* const blob_bottom_;
31 Blob<Dtype>* const blob_top_;
32 vector<Blob<Dtype>*> blob_bottom_vec_;
33 vector<Blob<Dtype>*> blob_top_vec_;
34 };
// Instantiate every test in this suite for both float and double.
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(NeuronLayerTest, Dtypes);
39 TYPED_TEST(NeuronLayerTest, TestReLUCPU) {
40 LayerParameter layer_param;
41 Caffeine::set_mode(Caffeine::CPU);
42 ReLULayer<TypeParam> layer(layer_param);
43 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
44 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
45 // Now, check values
46 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
47 const TypeParam* top_data = this->blob_top_->cpu_data();
48 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
49 EXPECT_GE(top_data[i], 0.);
50 EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
51 }
52 }
55 TYPED_TEST(NeuronLayerTest, TestReLUGradientCPU) {
56 LayerParameter layer_param;
57 Caffeine::set_mode(Caffeine::CPU);
58 ReLULayer<TypeParam> layer(layer_param);
59 GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
60 checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
61 }
64 TYPED_TEST(NeuronLayerTest, TestReLUGPU) {
65 LayerParameter layer_param;
66 Caffeine::set_mode(Caffeine::GPU);
67 ReLULayer<TypeParam> layer(layer_param);
68 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
69 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
70 // Now, check values
71 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
72 const TypeParam* top_data = this->blob_top_->cpu_data();
73 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
74 EXPECT_GE(top_data[i], 0.);
75 EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
76 }
77 }
80 TYPED_TEST(NeuronLayerTest, TestReLUGradientGPU) {
81 LayerParameter layer_param;
82 Caffeine::set_mode(Caffeine::GPU);
83 ReLULayer<TypeParam> layer(layer_param);
84 GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
85 checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
86 }
89 TYPED_TEST(NeuronLayerTest, TestDropoutCPU) {
90 LayerParameter layer_param;
91 Caffeine::set_mode(Caffeine::CPU);
92 Caffeine::set_phase(Caffeine::TRAIN);
93 DropoutLayer<TypeParam> layer(layer_param);
94 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
95 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
96 // Now, check values
97 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
98 const TypeParam* top_data = this->blob_top_->cpu_data();
99 float scale = 1. / (1. - layer_param.dropout_ratio());
100 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
101 if (top_data[i] != 0) {
102 EXPECT_EQ(top_data[i], bottom_data[i] * scale);
103 }
104 }
105 }
108 TYPED_TEST(NeuronLayerTest, TestDropoutGradientCPU) {
109 LayerParameter layer_param;
110 Caffeine::set_mode(Caffeine::CPU);
111 DropoutLayer<TypeParam> layer(layer_param);
112 GradientChecker<TypeParam> checker(1e-2, 1e-3);
113 checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
114 }
117 TYPED_TEST(NeuronLayerTest, TestDropoutCPUTestPhase) {
118 LayerParameter layer_param;
119 Caffeine::set_mode(Caffeine::CPU);
120 Caffeine::set_phase(Caffeine::TEST);
121 DropoutLayer<TypeParam> layer(layer_param);
122 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
123 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
124 // Now, check values
125 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
126 const TypeParam* top_data = this->blob_top_->cpu_data();
127 float scale = 1. / (1. - layer_param.dropout_ratio());
128 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
129 if (top_data[i] != 0) {
130 EXPECT_EQ(top_data[i], bottom_data[i]);
131 }
132 }
133 }
136 TYPED_TEST(NeuronLayerTest, TestDropoutGPU) {
137 LayerParameter layer_param;
138 Caffeine::set_mode(Caffeine::GPU);
139 Caffeine::set_phase(Caffeine::TRAIN);
140 DropoutLayer<TypeParam> layer(layer_param);
141 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
142 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
143 // Now, check values
144 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
145 const TypeParam* top_data = this->blob_top_->cpu_data();
146 float scale = 1. / (1. - layer_param.dropout_ratio());
147 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
148 if (top_data[i] != 0) {
149 EXPECT_EQ(top_data[i], bottom_data[i] * scale);
150 }
151 }
152 }
155 TYPED_TEST(NeuronLayerTest, TestDropoutGradientGPU) {
156 if (CAFFEINE_TEST_CUDA_PROP.major >= 2) {
157 LayerParameter layer_param;
158 Caffeine::set_mode(Caffeine::GPU);
159 DropoutLayer<TypeParam> layer(layer_param);
160 GradientChecker<TypeParam> checker(1e-2, 1e-3);
161 checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
162 } else {
163 LOG(ERROR) << "Skipping test to spare my laptop.";
164 }
165 }
168 TYPED_TEST(NeuronLayerTest, TestDropoutGPUTestPhase) {
169 LayerParameter layer_param;
170 Caffeine::set_mode(Caffeine::GPU);
171 Caffeine::set_phase(Caffeine::TEST);
172 DropoutLayer<TypeParam> layer(layer_param);
173 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
174 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
175 // Now, check values
176 const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
177 const TypeParam* top_data = this->blob_top_->cpu_data();
178 float scale = 1. / (1. - layer_param.dropout_ratio());
179 for (int i = 0; i < this->blob_bottom_->count(); ++i) {
180 if (top_data[i] != 0) {
181 EXPECT_EQ(top_data[i], bottom_data[i]);
182 }
183 }
184 }
186 }