From 25a865cd8ba0995c89907990fedaa357282b9a64 Mon Sep 17 00:00:00 2001
From: Yangqing Jia
Date: Thu, 31 Oct 2013 12:18:35 -0700
Subject: [PATCH] stochastic pooling test

---
 src/caffe/test/test_gradient_check_util.hpp |   4 +-
 src/caffe/test/test_pooling_layer.cpp       |   6 +-
 src/caffe/test/test_stochastic_pooing.cpp   | 162 ++++++++++++++++++++
 3 files changed, 165 insertions(+), 7 deletions(-)
 create mode 100644 src/caffe/test/test_stochastic_pooing.cpp

diff --git a/src/caffe/test/test_gradient_check_util.hpp b/src/caffe/test/test_gradient_check_util.hpp
index 49734838..d7360085 100644
--- a/src/caffe/test/test_gradient_check_util.hpp
+++ b/src/caffe/test/test_gradient_check_util.hpp
@@ -28,9 +28,7 @@ class GradientChecker {
       : stepsize_(stepsize), threshold_(threshold), seed_(seed),
         kink_(kink), kink_range_(kink_range) {}
   // Checks the gradient of a layer, with provided bottom layers and top
-  // layers. The gradient checker will check the gradient with respect to
-  // the parameters of the layer, as well as the input blobs if check_through
-  // is set True.
+  // layers.
   // Note that after the gradient check, we do not guarantee that the data
   // stored in the layer parameters and the blobs are unchanged.
   void CheckGradient(Layer<Dtype>& layer, vector<Blob<Dtype>*>& bottom,
diff --git a/src/caffe/test/test_pooling_layer.cpp b/src/caffe/test/test_pooling_layer.cpp
index a5d0c9fb..67cae131 100644
--- a/src/caffe/test/test_pooling_layer.cpp
+++ b/src/caffe/test/test_pooling_layer.cpp
@@ -33,8 +33,6 @@ class PoolingLayerTest : public ::testing::Test {
     blob_top_vec_.push_back(blob_top_);
   };
   virtual ~PoolingLayerTest() { delete blob_bottom_; delete blob_top_; }
-  void ReferenceLRNForward(const Blob<Dtype>& blob_bottom,
-      const LayerParameter& layer_param, Blob<Dtype>* blob_top);
   Blob<Dtype>* const blob_bottom_;
   Blob<Dtype>* const blob_top_;
   vector<Blob<Dtype>*> blob_bottom_vec_;
@@ -71,7 +69,7 @@ TYPED_TEST(PoolingLayerTest, PrintGPUBackward) {
   }
   for (int i = 0; i < this->blob_top_->count(); ++i) {
     cout << "top data " << i << " " << this->blob_top_->cpu_data()[i] << endl;
-  } 
+  }

   for (int i = 0; i < this->blob_top_->count(); ++i) {
     this->blob_top_->mutable_cpu_diff()[i] = 1.;
@@ -79,7 +77,7 @@
   layer.Backward(this->blob_top_vec_, true, &(this->blob_bottom_vec_));
   for (int i = 0; i < this->blob_bottom_->count(); ++i) {
     cout << "bottom diff " << i << " " << this->blob_bottom_->cpu_diff()[i] << endl;
-  } 
+  }
 }
 */

diff --git a/src/caffe/test/test_stochastic_pooing.cpp b/src/caffe/test/test_stochastic_pooing.cpp
new file mode 100644
index 00000000..e2b60eee
--- /dev/null
+++ b/src/caffe/test/test_stochastic_pooing.cpp
@@ -0,0 +1,162 @@
+// Copyright 2013 Yangqing Jia
+
+#include <cstring>
+#include <cuda_runtime.h>
+
+#include "gtest/gtest.h"
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/vision_layers.hpp"
+#include "caffe/test/test_gradient_check_util.hpp"
+
+#include "caffe/test/test_caffe_main.hpp"
+
+namespace caffe {
+
+extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
+
+template <typename Dtype>
+class StochasticPoolingLayerTest : public ::testing::Test {
+ protected:
+  StochasticPoolingLayerTest()
+      : blob_bottom_(new Blob<Dtype>()),
+        blob_top_(new Blob<Dtype>()) {};
+  virtual void SetUp() {
+    Caffe::set_random_seed(1701);
+    blob_bottom_->Reshape(2, 3, 6, 5);
+    // fill the values
+    FillerParameter filler_param;
+    filler_param.set_min(0.1);
+    filler_param.set_max(1.);
+    UniformFiller<Dtype> filler(filler_param);
+    filler.Fill(this->blob_bottom_);
+    blob_bottom_vec_.push_back(blob_bottom_);
+    blob_top_vec_.push_back(blob_top_);
+  };
+
+  virtual ~StochasticPoolingLayerTest() {
+    delete blob_bottom_; delete blob_top_;
+  }
+
+  Blob<Dtype>* const blob_bottom_;
+  Blob<Dtype>* const blob_top_;
+  vector<Blob<Dtype>*> blob_bottom_vec_;
+  vector<Blob<Dtype>*> blob_top_vec_;
+};
+
+typedef ::testing::Types<float, double> Dtypes;
+TYPED_TEST_CASE(StochasticPoolingLayerTest, Dtypes);
+
+TYPED_TEST(StochasticPoolingLayerTest, TestSetup) {
+  LayerParameter layer_param;
+  layer_param.set_kernelsize(3);
+  layer_param.set_stride(2);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  EXPECT_EQ(this->blob_top_->num(), this->blob_bottom_->num());
+  EXPECT_EQ(this->blob_top_->channels(), this->blob_bottom_->channels());
+  EXPECT_EQ(this->blob_top_->height(), 3);
+  EXPECT_EQ(this->blob_top_->width(), 2);
+}
+
+TYPED_TEST(StochasticPoolingLayerTest, TestStochasticGPU) {
+  Caffe::set_mode(Caffe::GPU);
+  Caffe::set_phase(Caffe::TRAIN);
+  LayerParameter layer_param;
+  layer_param.set_kernelsize(3);
+  layer_param.set_stride(2);
+
+  layer_param.set_pool(LayerParameter_PoolMethod_STOCHASTIC);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
+
+  // Check if the output is correct - it should do random sampling
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  TypeParam total = 0;
+  for (int n = 0; n < this->blob_top_->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int ph = 0; ph < this->blob_top_->height(); ++ph) {
+        for (int pw = 0; pw < this->blob_top_->width(); ++pw) {
+          TypeParam pooled = top_data[this->blob_top_->offset(n, c, ph, pw)];
+          total += pooled;
+          int hstart = ph * 2;
+          int hend = min(hstart + 3, this->blob_bottom_->height());
+          int wstart = pw * 2;
+          int wend = min(wstart + 3, this->blob_bottom_->width());
+          bool has_equal = false;
+          for (int h = hstart; h < hend; ++h) {
+            for (int w = wstart; w < wend; ++w) {
+              has_equal |= (pooled ==
+                  bottom_data[this->blob_bottom_->offset(n, c, h, w)]);
+            }
+          }
+          EXPECT_TRUE(has_equal);
+        }
+      }
+    }
+  }
+  // When we are doing stochastic pooling, the average we get should be higher
+  // than the simple data average since we are weighting more on higher-valued
+  // ones.
+  EXPECT_GE(total / this->blob_top_->count(), 0.55);
+}
+
+TYPED_TEST(StochasticPoolingLayerTest, TestStochasticGPUTestPhase) {
+  Caffe::set_mode(Caffe::GPU);
+  Caffe::set_phase(Caffe::TEST);
+  LayerParameter layer_param;
+  layer_param.set_kernelsize(3);
+  layer_param.set_stride(2);
+
+  layer_param.set_pool(LayerParameter_PoolMethod_STOCHASTIC);
+  PoolingLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
+
+  // Check if the output is correct - it should do random sampling
+  const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
+  const TypeParam* top_data = this->blob_top_->cpu_data();
+  for (int n = 0; n < this->blob_top_->num(); ++n) {
+    for (int c = 0; c < this->blob_top_->channels(); ++c) {
+      for (int ph = 0; ph < this->blob_top_->height(); ++ph) {
+        for (int pw = 0; pw < this->blob_top_->width(); ++pw) {
+          TypeParam pooled = top_data[this->blob_top_->offset(n, c, ph, pw)];
+          int hstart = ph * 2;
+          int hend = min(hstart + 3, this->blob_bottom_->height());
+          int wstart = pw * 2;
+          int wend = min(wstart + 3, this->blob_bottom_->width());
+          bool smaller_than_max = false;
+          for (int h = hstart; h < hend; ++h) {
+            for (int w = wstart; w < wend; ++w) {
+              smaller_than_max |= (pooled <=
+                  bottom_data[this->blob_bottom_->offset(n, c, h, w)]);
+            }
+          }
+          EXPECT_TRUE(smaller_than_max);
+        }
+      }
+    }
+  }
+}
+
+
+
+TYPED_TEST(StochasticPoolingLayerTest, TestGradientGPU) {
+  Caffe::set_mode(Caffe::GPU);
+  Caffe::set_phase(Caffe::TRAIN);
+  LayerParameter layer_param;
+  layer_param.set_kernelsize(3);
+  layer_param.set_stride(2);
+
+  layer_param.set_pool(LayerParameter_PoolMethod_STOCHASTIC);
+  PoolingLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3);
+  // it is too expensive to call curand multiple times, so we don't do an
+  // exhaustive gradient check.
+  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+}
+
+
+
+}
-- 
2.26.2
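A quick standalone illustration of the bound checked in TestStochasticGPU above (not part of the patch): the bottom blob is filled uniformly in [0.1, 1], whose plain mean is about 0.55, while stochastic pooling in the TRAIN phase samples one value per window with probability proportional to its value and is therefore biased toward larger entries. The sketch below is a minimal CPU simulation of that sampling rule on a single full 3x3 window, using plain rand() instead of curand; the window size, trial count, and variable names are illustrative assumptions, not code from the patch.

// Minimal sketch: value-proportional sampling over uniform [0.1, 1] windows.
#include <cstdio>
#include <cstdlib>
#include <vector>

int main() {
  std::srand(1701);
  const int kTrials = 100000;
  double pooled_sum = 0., data_sum = 0.;
  for (int t = 0; t < kTrials; ++t) {
    // One 3x3 pooling window filled uniformly from [0.1, 1], mirroring the
    // UniformFiller range configured in the test's SetUp.
    std::vector<double> window(9);
    double window_sum = 0.;
    for (int i = 0; i < 9; ++i) {
      window[i] = 0.1 + 0.9 * (std::rand() / (RAND_MAX + 1.0));
      window_sum += window[i];
      data_sum += window[i];
    }
    // Pick one element with probability proportional to its value, the
    // sampling rule stochastic pooling applies at training time.
    double r = window_sum * (std::rand() / (RAND_MAX + 1.0));
    int picked = 8;  // fall back to the last element against rounding error
    double cumsum = 0.;
    for (int i = 0; i < 9; ++i) {
      cumsum += window[i];
      if (r < cumsum) { picked = i; break; }
    }
    pooled_sum += window[picked];
  }
  std::printf("data mean   ~ %.3f\n", data_sum / (9.0 * kTrials));
  std::printf("pooled mean ~ %.3f\n", pooled_sum / kTrials);
  return 0;
}

Running this prints a data mean near 0.55 and a pooled mean well above it (roughly 0.67 for this distribution), which is why EXPECT_GE(total / count, 0.55) is a safe lower bound for the training-phase test.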