summary | shortlog | log | commit | commitdiff | tree
raw | patch | inline | side by side (parent: b5e50de)
author:    Yangqing Jia <jiayq84@gmail.com>  Thu, 26 Sep 2013 05:38:20 +0000 (22:38 -0700)
committer: Yangqing Jia <jiayq84@gmail.com>  Thu, 26 Sep 2013 05:38:20 +0000 (22:38 -0700)
src/caffe/blob.cpp | patch | blob | history | |
src/caffe/layers/softmax_layer.cpp | patch | blob | history | |
src/caffe/test/test_softmax_layer.cpp | patch | blob | history |
diff --git a/src/caffe/blob.cpp b/src/caffe/blob.cpp
index aacb05c7c58007254eb41a8487322a209264c248..ecb37b7f15ef7730d1d97ee52a5d778f1b967c3a 100644 (file)
--- a/src/caffe/blob.cpp
+++ b/src/caffe/blob.cpp
memcpy(diff_->mutable_cpu_data(), source.cpu_diff(),
count_ * sizeof(Dtype));
}
+ return (*this);
}
template <typename Dtype>
index 31f25c38bd8e8ee81c85279bc85654cb27ad7b4a..ead05b3094b2929f3d5d27d85a695b694b94237b 100644 (file)
sum_multiplier_.Reshape(1, bottom[0]->channels(),
bottom[0]->height(), bottom[0]->width());
Dtype* multiplier_data = sum_multiplier_.mutable_cpu_data();
- for (int i = 0; i < bottom[0]->num(); ++i) {
+ for (int i = 0; i < sum_multiplier_.count(); ++i) {
multiplier_data[i] = 1.;
}
scale_.Reshape(bottom[0]->num(), 1, 1, 1);
index 37391ea65c99d1cecc0661da80846aa13fe26790..253ea8a9f09bb37dc635eb740e1ff834ceed9949 100644 (file)
// Copyright 2013 Yangqing Jia
+#include <cmath>
#include <cstring>
#include <cuda_runtime.h>
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(SoftmaxLayerTest, Dtypes);
-TYPED_TEST(SoftmaxLayerTest, TestReLUCPU) {
+TYPED_TEST(SoftmaxLayerTest, TestForwardCPU) {
LayerParameter layer_param;
Caffe::set_mode(Caffe::CPU);
SoftmaxLayer<TypeParam> layer(layer_param);
layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
- NOT_IMPLEMENTED;
+ for (int i = 0; i < this->blob_bottom_->num(); ++i) {
+ TypeParam scale = 0;
+ for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+ scale += exp(this->blob_bottom_->data_at(i, j, 0, 0));
+ }
+ for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+ EXPECT_GE(this->blob_top_->data_at(i, j, 0, 0) + 1e-4,
+ exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale)
+ << "debug: " << i << " " << j;
+ EXPECT_LE(this->blob_top_->data_at(i, j, 0, 0) - 1e-4,
+ exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale)
+ << "debug: " << i << " " << j;
+ }
+ }
}
-
+TYPED_TEST(SoftmaxLayerTest, TestGradientCPU) {
+ LayerParameter layer_param;
+ Caffe::set_mode(Caffe::CPU);
+ SoftmaxLayer<TypeParam> layer(layer_param);
+ GradientChecker<TypeParam> checker(1e-2, 1e-3);
+ checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+}
}