diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 93969b159683f8807c5d30411acf88a7e170e3f9..bc699fa2c62756e5636ebb041d672d3ed8a929e1 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
map<string, int> blob_name_to_idx;
set<string> available_blobs;
int num_layers = param.layers_size();
- CHECK_EQ(bottom.size(), param.bottom_size())
+ CHECK_EQ(bottom.size(), param.input_size())
<< "Incorrect bottom blob size.";
// set the input blobs
- for (int i = 0; i < param.bottom_size(); ++i) {
- const string& blob_name = param.bottom(i);
- blobs_.push_back(Blob<Dtype>(*bottom[i]));
+ for (int i = 0; i < param.input_size(); ++i) {
+ const string& blob_name = param.input(i);
+ CHECK_GT(bottom[i]->count(), 0);
+ shared_ptr<Blob<Dtype> > blob_pointer(
+ new Blob<Dtype>(bottom[i]->num(), bottom[i]->channels(),
+ bottom[i]->height(), bottom[i]->width()));
+ blobs_.push_back(blob_pointer);
blob_names_.push_back(blob_name);
net_input_blob_indices_.push_back(i);
blob_name_to_idx[blob_name] = i;
available_blobs.insert(blob_name);
}
// For each layer, set up their input and output
- layers_.resize(param.layers_size());
bottom_vecs_.resize(param.layers_size());
top_vecs_.resize(param.layers_size());
- for (int i = 0; i < param.top_size(); ++i) {
+ bottom_id_vecs_.resize(param.layers_size());
+ top_id_vecs_.resize(param.layers_size());
+ for (int i = 0; i < param.layers_size(); ++i) {
const LayerConnection& layer_connection = param.layers(i);
const LayerParameter& layer_param = layer_connection.layer();
- layers_[i].reset(GetLayer<Dtype>(layer_param));
+ layers_.push_back(shared_ptr<Layer<Dtype> >(GetLayer<Dtype>(layer_param)));
+ layer_names_.push_back(layer_param.name());
+ LOG(INFO) << "Creating Layer " << layer_param.name();
// Figure out this layer's input and output
for (int j = 0; j < layer_connection.bottom_size(); ++j) {
const string& blob_name = layer_connection.bottom(j);
LOG(FATAL) << "Unknown blob input " << blob_name <<
" to layer" << j;
}
+ LOG(INFO) << layer_param.name() << " <- " << blob_name;
bottom_vecs_[i].push_back(
- &blobs_[blob_name_to_idx[blob_name]]);
+ blobs_[blob_name_to_idx[blob_name]].get());
+ bottom_id_vecs_[i].push_back(blob_name_to_idx[blob_name]);
available_blobs.erase(blob_name);
}
for (int j = 0; j < layer_connection.top_size(); ++j) {
const string& blob_name = layer_connection.top(j);
if (blob_name_to_idx.find(blob_name) != blob_name_to_idx.end()) {
LOG(FATAL) << "Duplicate blobs produced by multiple sources.";
}
- blobs_.push_back(Blob<Dtype>());
+ LOG(INFO) << layer_param.name() << " -> " << blob_name;
+ shared_ptr<Blob<Dtype> > blob_pointer(new Blob<Dtype>());
+ blobs_.push_back(blob_pointer);
blob_names_.push_back(blob_name);
blob_name_to_idx[blob_name] = blob_names_.size() - 1;
available_blobs.insert(blob_name);
- top_vecs_[i].push_back(&blobs_[blob_names_.size() - 1]);
+ top_vecs_[i].push_back(blobs_[blob_names_.size() - 1].get());
+ top_id_vecs_[i].push_back(blob_names_.size() - 1);
}
}
- // In the end, check if all remaining available blobs are top blobs.
- for (int i = 0; i < param.top_size(); ++i) {
- const string& blob_name = param.top(i);
- if (blob_name_to_idx.find(blob_name) == blob_name_to_idx.end()) {
- LOG(FATAL) << "Unknown blob input " << blob_name;
- }
- net_output_blob_indices_.push_back(blob_name_to_idx[blob_name]);
- available_blobs.erase(blob_name);
- }
- if (!available_blobs.empty()) {
- LOG(WARNING) << "There are some internal blobs not used:";
- for (set<string>::iterator it = available_blobs.begin();
- it != available_blobs.end(); ++it) {
- LOG(WARNING) << " " << *it;
- }
+ // In the end, all remaining blobs are considered output blobs.
+ for (set<string>::iterator it = available_blobs.begin();
+ it != available_blobs.end(); ++it) {
+ LOG(ERROR) << "This network produces output " << *it;
+ net_output_blob_indices_.push_back(blob_name_to_idx[*it]);
+ net_output_blobs_.push_back(blobs_[blob_name_to_idx[*it]].get());
}
- LOG(INFO) << "Setting up the layers.";
+ LOG(ERROR) << "Setting up the layers.";
for (int i = 0; i < layers_.size(); ++i) {
+ LOG(INFO) << "Setting up " << layer_names_[i];
layers_[i]->SetUp(bottom_vecs_[i], &top_vecs_[i]);
+ vector<shared_ptr<Blob<Dtype> > >& layer_params = layers_[i]->params();
+ for (int j = 0; j < layer_params.size(); ++j) {
+ params_.push_back(layer_params[j]);
+ }
}
+
+ LOG(ERROR) << "Network initialization done.";
}
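// --- Editor's sketch (not part of the patch): how the name-based wiring in
// Init() resolves. Take a hypothetical two-layer NetParameter in text form
// (layer type names here are made up for illustration):
//
//   input: "data"
//   layers { layer { name: "ip1" type: "innerproduct" } bottom: "data" top: "ip1" }
//   layers { layer { name: "prob" type: "softmax" } bottom: "ip1" top: "prob" }
//
// "data" and "ip1" are each consumed as a bottom and erased from
// available_blobs, while "prob" is never consumed; it is what remains at the
// end, so it becomes the single entry of net_output_blobs_.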
template <typename Dtype>
-void Net<Dtype>::Forward(const vector<Blob<Dtype>*> & bottom,
- vector<Blob<Dtype>*>* top) {
+const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
+ const vector<Blob<Dtype>*> & bottom) {
// Copy bottom to internal bottom
for (int i = 0; i < bottom.size(); ++i) {
- blobs_[net_input_blob_indices_[i]] = *bottom[i];
+ blobs_[net_input_blob_indices_[i]]->CopyFrom(*bottom[i]);
}
for (int i = 0; i < layers_.size(); ++i) {
layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
}
- // Copy internal top to top
- for (int i = 0; i < (*top).size(); ++i) {
- NOT_IMPLEMENTED;
- }
+ return net_output_blobs_;
}
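// --- Editor's sketch (not part of the patch): calling the new Forward(),
// which now hands back the net's output blobs instead of filling a
// caller-provided top vector. Assumes an already-constructed Net<float> "net"
// whose NetParameter declares exactly one input:
//
//   vector<Blob<float>*> bottom_vec;
//   bottom_vec.push_back(&input_blob);           // shape must match the declared input
//   const vector<Blob<float>*>& out = net.Forward(bottom_vec);
//   const float* out_data = out[0]->cpu_data();  // read-only host view of the result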
template <typename Dtype>
Dtype Net<Dtype>::Backward() {
Dtype loss = 0;
// TODO(Yangqing): figure out those layers that do not need backward.
for (int i = layers_.size() - 1; i >= 0; --i) {
- loss += layers_[i]->Backward(top_vecs_[i], true, &bottom_vecs_[i]);
+ Dtype layer_loss = layers_[i]->Backward(
+ top_vecs_[i], true, &bottom_vecs_[i]);
+ loss += layer_loss;
}
return loss;
}
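// --- Editor's sketch (not part of the patch): Backward() walks the layers in
// reverse and sums whatever each layer's Backward() returns, so a caller sees
// a single scalar loss for the whole net:
//
//   net.Forward(bottom_vec);
//   float loss = net.Backward();  // gradients now sit in each parameter blob's diff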
continue;
}
LOG(INFO) << "Loading source layer " << source_layer_name;
- vector<Blob<Dtype> >& target_blobs = layers_[target_layer_id]->params();
+ vector<shared_ptr<Blob<Dtype> > >& target_blobs =
+ layers_[target_layer_id]->params();
CHECK_EQ(target_blobs.size(), source_layer.blobs_size())
<< "Incompatible number of blobs for layer " << source_layer_name;
for (int j = 0; j < target_blobs.size(); ++j) {
- target_blobs[j].FromProto(source_layer.blobs(j));
+ target_blobs[j]->FromProto(source_layer.blobs(j));
}
}
}
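// --- Editor's sketch (not part of the patch): loading pretrained weights.
// CopyTrainedLayersFrom matches layers by name and copies each blob proto into
// the target layer's (now shared_ptr-held) parameter blobs. The io helper
// ReadProtoFromBinaryFile and the file name are assumptions for illustration:
//
//   NetParameter trained_param;
//   ReadProtoFromBinaryFile("pretrained.binaryproto", &trained_param);
//   net.CopyTrainedLayersFrom(trained_param);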
param->set_name(name_);
// Add bottom and top
for (int i = 0; i < net_input_blob_indices_.size(); ++i) {
- param->add_bottom(blob_names_[net_input_blob_indices_[i]]);
- }
- for (int i = 0; i < net_output_blob_indices_.size(); ++i) {
- param->add_top(blob_names_[net_output_blob_indices_[i]]);
+ param->add_input(blob_names_[net_input_blob_indices_[i]]);
}
for (int i = 0; i < layers_.size(); ++i) {
LayerConnection* layer_connection = param->add_layers();
+ for (int j = 0; j < bottom_id_vecs_[i].size(); ++j) {
+ layer_connection->add_bottom(blob_names_[bottom_id_vecs_[i][j]]);
+ }
+ for (int j = 0; j < top_id_vecs_[i].size(); ++j) {
+ layer_connection->add_top(blob_names_[top_id_vecs_[i][j]]);
+ }
+ LayerParameter* layer_parameter = layer_connection->mutable_layer();
+ layers_[i]->ToProto(layer_parameter);
+ }
+}
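// --- Editor's sketch (not part of the patch): round-tripping a net through
// ToProto. The bottom_id_vecs_/top_id_vecs_ bookkeeping added above is what
// lets each layer's connections be written back out by name. ToProto(&snapshot)
// and the (param, bottom) constructor are assumed from the signatures visible
// in this file:
//
//   NetParameter snapshot;
//   net.ToProto(&snapshot);
//   Net<float> clone(snapshot, bottom_vec);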
+
+template <typename Dtype>
+void Net<Dtype>::Update() {
+ for (int i = 0; i < params_.size(); ++i) {
+ params_[i]->Update();
}
}
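// --- Editor's sketch (not part of the patch): with params_ collected from
// every layer during Init(), one SGD-style step can be written against the
// whole net. Any learning-rate scaling of the diffs is assumed to happen
// before Update(), which just applies each parameter blob's accumulated diff:
//
//   net.Forward(bottom_vec);
//   float loss = net.Backward();
//   net.Update();                // data -= diff for every parameter blob
//   LOG(INFO) << "Training loss: " << loss;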