// Copyright Yangqing Jia 2013

#include <map>
#include <set>
#include <string>
#include <vector>

#include "caffe/proto/caffe.pb.h"
#include "caffe/layer_factory.hpp"
#include "caffe/net.hpp"

using std::map;
using std::pair;
using std::set;

namespace caffe {

template <typename Dtype>
Net<Dtype>::Net(const NetParameter& param,
    const vector<Blob<Dtype>* >& bottom) {
  // Build all the layers and set up their connections.
  name_ = param.name();
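  // blob_name_to_idx maps each blob name to its index in blobs_;
  // available_blobs tracks blobs produced but not yet consumed, so that
  // dangling outputs can be reported once the net is fully wired.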
  map<string, int> blob_name_to_idx;
  set<string> available_blobs;
  int num_layers = param.layers_size();
  CHECK_EQ(bottom.size(), param.bottom_size())
      << "Incorrect bottom blob size.";
  // Set the input blobs.
  for (int i = 0; i < param.bottom_size(); ++i) {
    const string& blob_name = param.bottom(i);
    CHECK_GT(bottom[i]->count(), 0);
    shared_ptr<Blob<Dtype> > blob_pointer(
        new Blob<Dtype>(bottom[i]->num(), bottom[i]->channels(),
            bottom[i]->height(), bottom[i]->width()));
    blobs_.push_back(blob_pointer);
    blob_names_.push_back(blob_name);
    net_input_blob_indices_.push_back(i);
    blob_name_to_idx[blob_name] = i;
    available_blobs.insert(blob_name);
  }
  // For each layer, set up its input and output.
  bottom_vecs_.resize(param.layers_size());
  top_vecs_.resize(param.layers_size());
  bottom_id_vecs_.resize(param.layers_size());
  top_id_vecs_.resize(param.layers_size());
  for (int i = 0; i < param.layers_size(); ++i) {
    const LayerConnection& layer_connection = param.layers(i);
    const LayerParameter& layer_param = layer_connection.layer();
    layers_.push_back(shared_ptr<Layer<Dtype> >(GetLayer<Dtype>(layer_param)));
    layer_names_.push_back(layer_param.name());
    LOG(INFO) << "Creating Layer " << layer_param.name();
    // Figure out this layer's input and output.
    for (int j = 0; j < layer_connection.bottom_size(); ++j) {
      const string& blob_name = layer_connection.bottom(j);
      if (available_blobs.find(blob_name) == available_blobs.end()) {
        LOG(FATAL) << "Unknown blob input " << blob_name
            << " to layer " << layer_param.name();
      }
      LOG(INFO) << layer_param.name() << " <- " << blob_name;
      bottom_vecs_[i].push_back(
          blobs_[blob_name_to_idx[blob_name]].get());
      bottom_id_vecs_[i].push_back(blob_name_to_idx[blob_name]);
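      // Erasing here makes each blob single-use: a second layer reading
      // the same blob would trip the "Unknown blob input" check above.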
      available_blobs.erase(blob_name);
    }
    for (int j = 0; j < layer_connection.top_size(); ++j) {
      const string& blob_name = layer_connection.top(j);
      if (blob_name_to_idx.find(blob_name) != blob_name_to_idx.end()) {
        LOG(FATAL) << "Duplicate blobs produced by multiple sources.";
      }
      LOG(INFO) << layer_param.name() << " -> " << blob_name;
      shared_ptr<Blob<Dtype> > blob_pointer(new Blob<Dtype>());
      blobs_.push_back(blob_pointer);
      blob_names_.push_back(blob_name);
      blob_name_to_idx[blob_name] = blob_names_.size() - 1;
      available_blobs.insert(blob_name);
      top_vecs_[i].push_back(blobs_[blob_names_.size() - 1].get());
      top_id_vecs_[i].push_back(blob_names_.size() - 1);
    }
  }
  LOG(INFO) << "Checking top blobs.";
  // In the end, check if all remaining available blobs are top blobs.
  for (int i = 0; i < param.top_size(); ++i) {
    const string& blob_name = param.top(i);
    if (blob_name_to_idx.find(blob_name) == blob_name_to_idx.end()) {
      LOG(FATAL) << "Unknown blob output " << blob_name;
    }
    net_output_blob_indices_.push_back(blob_name_to_idx[blob_name]);
    available_blobs.erase(blob_name);
  }
  if (!available_blobs.empty()) {
    LOG(WARNING) << "There are some internal blobs not used:";
    for (set<string>::iterator it = available_blobs.begin();
        it != available_blobs.end(); ++it) {
      LOG(WARNING) << "    " << *it;
    }
  }

  LOG(INFO) << "Setting up the layers.";
  for (int i = 0; i < layers_.size(); ++i) {
    LOG(INFO) << "Setting up " << layer_names_[i];
    layers_[i]->SetUp(bottom_vecs_[i], &top_vecs_[i]);
    vector<shared_ptr<Blob<Dtype> > >& layer_params = layers_[i]->params();
    for (int j = 0; j < layer_params.size(); ++j) {
      params_.push_back(layer_params[j]);
    }
  }

  LOG(INFO) << "Network initialization done.";
}
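
// A minimal construction sketch (illustrative only; assumes the caller has
// already filled a NetParameter and allocated one pre-shaped input blob per
// entry in param.bottom()):
//
//   NetParameter param;
//   // ... parse or build param ...
//   vector<Blob<float>*> inputs;
//   Net<float> net(param, inputs);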

template <typename Dtype>
void Net<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
    vector<Blob<Dtype>*>* top) {
  // Copy bottom to internal bottom.
  for (int i = 0; i < bottom.size(); ++i) {
    blobs_[net_input_blob_indices_[i]]->CopyFrom(*bottom[i]);
  }
  for (int i = 0; i < layers_.size(); ++i) {
    layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
  }
  // Copy internal top to top.
  for (int i = 0; i < top->size(); ++i) {
    (*top)[i]->CopyFrom(*blobs_[net_output_blob_indices_[i]]);
  }
}
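
// Note: Forward copies data at both boundaries, so the caller's top blobs
// are assumed to be pre-shaped to match the net's outputs (this reading of
// the contract assumes Blob::CopyFrom does not reshape its target).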

template <typename Dtype>
Dtype Net<Dtype>::Backward() {
  Dtype loss = 0;
  // TODO(Yangqing): figure out those layers that do not need backward.
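  // The `true` flag asks each layer to propagate gradients down to its
  // bottom blobs; each layer also returns its contribution to the loss.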
  for (int i = layers_.size() - 1; i >= 0; --i) {
    Dtype layer_loss = layers_[i]->Backward(
        top_vecs_[i], true, &bottom_vecs_[i]);
    loss += layer_loss;
  }
  return loss;
}

template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
  int num_source_layers = param.layers_size();
  for (int i = 0; i < num_source_layers; ++i) {
    const LayerParameter& source_layer = param.layers(i).layer();
    const string& source_layer_name = source_layer.name();
    int target_layer_id = 0;
    while (target_layer_id != layer_names_.size() &&
        layer_names_[target_layer_id] != source_layer_name) {
      ++target_layer_id;
    }
    if (target_layer_id == layer_names_.size()) {
      LOG(INFO) << "Ignoring source layer " << source_layer_name;
      continue;
    }
    LOG(INFO) << "Loading source layer " << source_layer_name;
    vector<shared_ptr<Blob<Dtype> > >& target_blobs =
        layers_[target_layer_id]->params();
    CHECK_EQ(target_blobs.size(), source_layer.blobs_size())
        << "Incompatible number of blobs for layer " << source_layer_name;
    for (int j = 0; j < target_blobs.size(); ++j) {
      target_blobs[j]->FromProto(source_layer.blobs(j));
    }
  }
}
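
// A loading sketch (illustrative; assumes a helper along the lines of
// ReadProtoFromBinaryFile for parsing a serialized NetParameter):
//
//   NetParameter trained;
//   ReadProtoFromBinaryFile("caffe_model.binaryproto", &trained);
//   net.CopyTrainedLayersFrom(trained);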

template <typename Dtype>
void Net<Dtype>::ToProto(NetParameter* param, bool write_diff) {
  param->Clear();
  param->set_name(name_);
  // Add bottom and top.
  for (int i = 0; i < net_input_blob_indices_.size(); ++i) {
    param->add_bottom(blob_names_[net_input_blob_indices_[i]]);
  }
  for (int i = 0; i < net_output_blob_indices_.size(); ++i) {
    param->add_top(blob_names_[net_output_blob_indices_[i]]);
  }
  for (int i = 0; i < layers_.size(); ++i) {
    LayerConnection* layer_connection = param->add_layers();
    for (int j = 0; j < bottom_id_vecs_[i].size(); ++j) {
      layer_connection->add_bottom(blob_names_[bottom_id_vecs_[i][j]]);
    }
    for (int j = 0; j < top_id_vecs_[i].size(); ++j) {
      layer_connection->add_top(blob_names_[top_id_vecs_[i][j]]);
    }
    LayerParameter* layer_parameter = layer_connection->mutable_layer();
    layers_[i]->ToProto(layer_parameter, write_diff);
  }
}
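
// A saving sketch (illustrative; assumes a helper along the lines of
// WriteProtoToBinaryFile for serializing the filled NetParameter):
//
//   NetParameter snapshot;
//   net.ToProto(&snapshot, false);  // false: skip the diffs
//   WriteProtoToBinaryFile(snapshot, "caffe_model.binaryproto");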

template <typename Dtype>
void Net<Dtype>::Update() {
  for (int i = 0; i < params_.size(); ++i) {
    params_[i]->Update();
  }
}
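
// A single training step, sketched (illustrative only; `net`, `inputs`, and
// a pre-shaped `outputs` vector are assumed, as in the sketches above):
//
//   net.Forward(inputs, &outputs);
//   float loss = net.Backward();  // gradients accumulate in the blob diffs
//   net.Update();                 // each parameter blob applies its diff
//   LOG(INFO) << "loss: " << loss;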

INSTANTIATE_CLASS(Net);

}  // namespace caffe