1 // Copyright 2013 Yangqing Jia
3 #include <cmath>
4 #include <cstring>
5 #include <cuda_runtime.h>
7 #include "gtest/gtest.h"
8 #include "caffe/blob.hpp"
9 #include "caffe/common.hpp"
10 #include "caffe/filler.hpp"
11 #include "caffe/vision_layers.hpp"
12 #include "caffe/test/test_gradient_check_util.hpp"
14 #include "caffe/test/test_caffe_main.hpp"
16 namespace caffe {
18 extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
20 template <typename Dtype>
21 class SoftmaxLayerTest : public ::testing::Test {
22 protected:
23 SoftmaxLayerTest()
24 : blob_bottom_(new Blob<Dtype>(2, 10, 1, 1)),
25 blob_top_(new Blob<Dtype>()) {
26 // fill the values
27 FillerParameter filler_param;
28 GaussianFiller<Dtype> filler(filler_param);
29 filler.Fill(this->blob_bottom_);
30 blob_bottom_vec_.push_back(blob_bottom_);
31 blob_top_vec_.push_back(blob_top_);
32 };
33 virtual ~SoftmaxLayerTest() { delete blob_bottom_; delete blob_top_; }
34 Blob<Dtype>* const blob_bottom_;
35 Blob<Dtype>* const blob_top_;
36 vector<Blob<Dtype>*> blob_bottom_vec_;
37 vector<Blob<Dtype>*> blob_top_vec_;
38 };
// Instantiate every test in this fixture for both float and double.
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(SoftmaxLayerTest, Dtypes);
43 TYPED_TEST(SoftmaxLayerTest, TestForwardCPU) {
44 LayerParameter layer_param;
45 Caffe::set_mode(Caffe::CPU);
46 SoftmaxLayer<TypeParam> layer(layer_param);
47 layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
48 layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
49 // Test sum
50 for (int i = 0; i < this->blob_bottom_->num(); ++i) {
51 TypeParam sum = 0;
52 for (int j = 0; j < this->blob_top_->channels(); ++j) {
53 sum += this->blob_top_->data_at(i, j, 0, 0);
54 }
55 EXPECT_GE(sum, 0.999);
56 EXPECT_LE(sum, 1.001);
57 }
58 // Test exact values
59 for (int i = 0; i < this->blob_bottom_->num(); ++i) {
60 TypeParam scale = 0;
61 for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
62 scale += exp(this->blob_bottom_->data_at(i, j, 0, 0));
63 }
64 for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
65 EXPECT_GE(this->blob_top_->data_at(i, j, 0, 0) + 1e-4,
66 exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale)
67 << "debug: " << i << " " << j;
68 EXPECT_LE(this->blob_top_->data_at(i, j, 0, 0) - 1e-4,
69 exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale)
70 << "debug: " << i << " " << j;
71 }
72 }
73 }
75 TYPED_TEST(SoftmaxLayerTest, TestGradientCPU) {
76 LayerParameter layer_param;
77 Caffe::set_mode(Caffe::CPU);
78 SoftmaxLayer<TypeParam> layer(layer_param);
79 GradientChecker<TypeParam> checker(1e-2, 1e-3);
80 checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
81 }
83 }