]> Gitweb @ Texas Instruments - Open Source Git Repositories - git.TI.com/gitweb - jacinto-ai/caffe-jacinto.git/blob - src/caffeine/test/test_neuron_layer.cpp
misc update
[jacinto-ai/caffe-jacinto.git] / src / caffeine / test / test_neuron_layer.cpp
#include <cstring>
#include <vector>

#include <cuda_runtime.h>

#include "gtest/gtest.h"
#include "caffeine/blob.hpp"
#include "caffeine/common.hpp"
#include "caffeine/filler.hpp"
#include "caffeine/vision_layers.hpp"
10 namespace caffeine {
11   
12 template <typename Dtype>
13 class NeuronLayerTest : public ::testing::Test {
14  protected:
15   NeuronLayerTest()
16       : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
17         blob_top_(new Blob<Dtype>()) {
18     // fill the values
19     FillerParameter filler_param;
20     GaussianFiller<Dtype> filler(filler_param);
21     filler.Fill(this->blob_bottom_);
22     blob_bottom_vec_.push_back(blob_bottom_);
23     blob_top_vec_.push_back(blob_top_);
24   };
25   virtual ~NeuronLayerTest() { delete blob_bottom_; delete blob_top_; }
26   Blob<Dtype>* const blob_bottom_;
27   Blob<Dtype>* const blob_top_;
28   vector<Blob<Dtype>*> blob_bottom_vec_;
29   vector<Blob<Dtype>*> blob_top_vec_;
30 };
// Instantiate every test in this fixture for both float and double.
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(NeuronLayerTest, Dtypes);
35 TYPED_TEST(NeuronLayerTest, TestReLUCPU) {
36   LayerParameter layer_param;
37   Caffeine::set_mode(Caffeine::CPU);
38   ReLULayer<TypeParam> layer(layer_param);
39   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
40   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
41   // Now, check values
42   const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
43   const TypeParam* top_data = this->blob_top_->cpu_data();
44   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
45     EXPECT_GE(top_data[i], 0.);
46     EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
47   }
48 }
50 TYPED_TEST(NeuronLayerTest, TestReLUGPU) {
51   LayerParameter layer_param;
52   Caffeine::set_mode(Caffeine::GPU);
53   ReLULayer<TypeParam> layer(layer_param);
54   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
55   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
56   // Now, check values
57   const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
58   const TypeParam* top_data = this->blob_top_->cpu_data();
59   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
60     EXPECT_GE(top_data[i], 0.);
61     EXPECT_TRUE(top_data[i] == 0 || top_data[i] == bottom_data[i]);
62   }
63 }
65 TYPED_TEST(NeuronLayerTest, TestDropoutCPU) {
66   LayerParameter layer_param;
67   Caffeine::set_mode(Caffeine::CPU);
68   Caffeine::set_phase(Caffeine::TRAIN);
69   DropoutLayer<TypeParam> layer(layer_param);
70   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
71   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
72   // Now, check values
73   const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
74   const TypeParam* top_data = this->blob_top_->cpu_data();
75   float scale = 1. / (1. - layer_param.dropout_ratio());
76   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
77     if (top_data[i] != 0) {
78       EXPECT_EQ(top_data[i], bottom_data[i] * scale);
79     }
80   }
81 }
83 TYPED_TEST(NeuronLayerTest, TestDropoutCPUTestPhase) {
84   LayerParameter layer_param;
85   Caffeine::set_mode(Caffeine::CPU);
86   Caffeine::set_phase(Caffeine::TEST);
87   DropoutLayer<TypeParam> layer(layer_param);
88   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
89   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
90   // Now, check values
91   const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
92   const TypeParam* top_data = this->blob_top_->cpu_data();
93   float scale = 1. / (1. - layer_param.dropout_ratio());
94   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
95     if (top_data[i] != 0) {
96       EXPECT_EQ(top_data[i], bottom_data[i]);
97     }
98   }
99 }
101 TYPED_TEST(NeuronLayerTest, TestDropoutGPU) {
102   LayerParameter layer_param;
103   Caffeine::set_mode(Caffeine::GPU);
104   Caffeine::set_phase(Caffeine::TRAIN);
105   DropoutLayer<TypeParam> layer(layer_param);
106   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
107   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
108   // Now, check values
109   const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
110   const TypeParam* top_data = this->blob_top_->cpu_data();
111   float scale = 1. / (1. - layer_param.dropout_ratio());
112   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
113     if (top_data[i] != 0) {
114       EXPECT_EQ(top_data[i], bottom_data[i] * scale);
115     }
116   }
119 TYPED_TEST(NeuronLayerTest, TestDropoutGPUTestPhase) {
120   LayerParameter layer_param;
121   Caffeine::set_mode(Caffeine::GPU);
122   Caffeine::set_phase(Caffeine::TEST);
123   DropoutLayer<TypeParam> layer(layer_param);
124   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
125   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
126   // Now, check values
127   const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
128   const TypeParam* top_data = this->blob_top_->cpu_data();
129   float scale = 1. / (1. - layer_param.dropout_ratio());
130   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
131     if (top_data[i] != 0) {
132       EXPECT_EQ(top_data[i], bottom_data[i]);
133     }
134   }