misc sample programs
author Yangqing Jia <jiayq84@gmail.com>
Fri, 4 Oct 2013 04:02:24 +0000 (21:02 -0700)
committer Yangqing Jia <jiayq84@gmail.com>
Fri, 4 Oct 2013 04:02:24 +0000 (21:02 -0700)
src/programs/test_read_imagenet.cpp [new file with mode: 0644]
src/programs/train_alexnet.cpp [new file with mode: 0644]

diff --git a/src/programs/test_read_imagenet.cpp b/src/programs/test_read_imagenet.cpp
new file mode 100644 (file)
index 0000000..508dc54
--- /dev/null
@@ -0,0 +1,35 @@
+// Copyright 2013 Yangqing Jia
+
+#include <glog/logging.h>
+#include <leveldb/db.h>
+#include <leveldb/write_batch.h>
+
+#include <string>
+
+int main(int argc, char** argv) {  // Scans the leveldb at argv[1] and counts its entries.
+  ::google::InitGoogleLogging(argv[0]);
+  leveldb::DB* db;
+  leveldb::Options options;
+  options.create_if_missing = false;  // the database must already exist; fail instead of creating
+
+  LOG(INFO) << "Opening leveldb " << argv[1];  // NOTE(review): argv[1] is never checked -- running with no argument dereferences a null pointer
+  leveldb::Status status = leveldb::DB::Open(
+      options, argv[1], &db);
+  CHECK(status.ok()) << "Failed to open leveldb " << argv[1];  // CHECK aborts the program on failure
+
+  leveldb::ReadOptions read_options;
+  read_options.fill_cache = false;  // one-off sequential scan: don't pollute the block cache
+  int count = 0;
+  leveldb::Iterator* it = db->NewIterator(read_options);
+  for (it->SeekToFirst(); it->Valid(); it->Next()) {
+    // just a dummy operation
+    volatile std::string value = it->value().ToString();  // presumably volatile keeps the read from being optimized away -- verify
+    // LOG(ERROR) << it->key().ToString();
+    if (++count % 10000 == 0) {  // progress report every 10k entries
+      LOG(ERROR) << "Processed " << count << " files.";
+    }
+  }
+
+  delete db;  // NOTE(review): 'it' is never deleted; leveldb requires deleting iterators before the DB
+  return 0;
+}
diff --git a/src/programs/train_alexnet.cpp b/src/programs/train_alexnet.cpp
new file mode 100644 (file)
index 0000000..c86a946
--- /dev/null
@@ -0,0 +1,100 @@
+// Copyright 2013 Yangqing Jia
+
+#include <cuda_runtime.h>
+#include <fcntl.h>
+#include <google/protobuf/text_format.h>
+
+#include <cstring>
+
+#include "caffe/blob.hpp"
+#include "caffe/common.hpp"
+#include "caffe/net.hpp"
+#include "caffe/filler.hpp"
+#include "caffe/proto/caffe.pb.h"
+#include "caffe/util/io.hpp"
+#include "caffe/optimization/solver.hpp"
+
+using namespace caffe;
+
+int main(int argc, char** argv) {  // Forward/backward timing harness; argv[1] is a NetParameter text prototxt.
+  cudaSetDevice(1);  // NOTE(review): GPU id 1 is hard-coded -- confirm this is intentional, consider a flag
+  Caffe::set_mode(Caffe::GPU);
+
+  NetParameter net_param;
+  ReadProtoFromTextFile(argv[1],  // NOTE(review): argv[1] unchecked; a missing argument crashes here
+      &net_param);
+  vector<Blob<float>*> bottom_vec;  // left empty: presumably the net supplies its own input layer -- verify
+  Net<float> caffe_net(net_param, bottom_vec);
+
+  // Run the network once without training to get the initial loss.
+  LOG(ERROR) << "Performing Forward";
+  caffe_net.Forward(bottom_vec);
+  LOG(ERROR) << "Performing Backward";
+  LOG(ERROR) << "Initial loss: " << caffe_net.Backward();
+
+  // Time 100 repeated forward-backward passes (no solver, so no weight updates here).
+  LOG(ERROR) << "Multiple Passes";
+  for (int i = 0; i < 100; ++i) {
+    caffe_net.ForwardBackward(bottom_vec);
+  }
+  LOG(ERROR) << "Multiple passes done.";
+/* NOTE(review): dead code below -- disabled SGD training plus train/test evaluation; it references LeNet/MNIST prototxts and dataset sizes (60000/10000) despite the 'alexnet' filename. Remove or port before enabling.
+  SolverParameter solver_param;
+  solver_param.set_base_lr(0.01);
+  solver_param.set_display(0);
+  solver_param.set_max_iter(6000);
+  solver_param.set_lr_policy("inv");
+  solver_param.set_gamma(0.0001);
+  solver_param.set_power(0.75);
+  solver_param.set_momentum(0.9);
+
+  LOG(ERROR) << "Starting Optimization";
+  SGDSolver<float> solver(solver_param);
+  solver.Solve(&caffe_net);
+  LOG(ERROR) << "Optimization Done.";
+
+  // Run the network after training.
+  LOG(ERROR) << "Performing Forward";
+  caffe_net.Forward(bottom_vec);
+  LOG(ERROR) << "Performing Backward";
+  float loss = caffe_net.Backward();
+  LOG(ERROR) << "Final loss: " << loss;
+
+  NetParameter trained_net_param;
+  caffe_net.ToProto(&trained_net_param);
+
+  NetParameter traintest_net_param;
+  ReadProtoFromTextFile("caffe/test/data/lenet_traintest.prototxt",
+      &traintest_net_param);
+  Net<float> caffe_traintest_net(traintest_net_param, bottom_vec);
+  caffe_traintest_net.CopyTrainedLayersFrom(trained_net_param);
+
+  // Test run
+  double train_accuracy = 0;
+  int batch_size = traintest_net_param.layers(0).layer().batchsize();
+  for (int i = 0; i < 60000 / batch_size; ++i) {
+    const vector<Blob<float>*>& result =
+        caffe_traintest_net.Forward(bottom_vec);
+    train_accuracy += result[0]->cpu_data()[0];
+  }
+  train_accuracy /= 60000 / batch_size;
+  LOG(ERROR) << "Train accuracy:" << train_accuracy;
+
+  NetParameter test_net_param;
+  ReadProtoFromTextFile("caffe/test/data/lenet_test.prototxt", &test_net_param);
+  Net<float> caffe_test_net(test_net_param, bottom_vec);
+  caffe_test_net.CopyTrainedLayersFrom(trained_net_param);
+
+  // Test run
+  double test_accuracy = 0;
+  batch_size = test_net_param.layers(0).layer().batchsize();
+  for (int i = 0; i < 10000 / batch_size; ++i) {
+    const vector<Blob<float>*>& result =
+        caffe_test_net.Forward(bottom_vec);
+    test_accuracy += result[0]->cpu_data()[0];
+  }
+  test_accuracy /= 10000 / batch_size;
+  LOG(ERROR) << "Test accuracy:" << test_accuracy;
+*/
+  return 0;
+}