add mnist data
author Yangqing Jia <jiayq84@gmail.com>
Wed, 16 Oct 2013 20:23:03 +0000 (13:23 -0700)
committer Yangqing Jia <jiayq84@gmail.com>
Wed, 16 Oct 2013 20:23:03 +0000 (13:23 -0700)
data/get_mnist.sh [new file with mode: 0755]
data/lenet.prototxt [new file with mode: 0644]
data/lenet_test.prototxt [new file with mode: 0644]
src/caffe/pyutil/drawnet.py

diff --git a/data/get_mnist.sh b/data/get_mnist.sh
new file mode 100755 (executable)
index 0000000..ec979bd
--- /dev/null
@@ -0,0 +1,18 @@
+#!/usr/bin/env sh
+# This script downloads the MNIST data and unzips it.
+
+echo "Downloading..."
+
+wget -q http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
+wget -q http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz
+wget -q http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
+wget -q http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz
+
+echo "Unzipping..."
+
+gunzip train-images-idx3-ubyte.gz
+gunzip train-labels-idx1-ubyte.gz
+gunzip t10k-images-idx3-ubyte.gz
+gunzip t10k-labels-idx1-ubyte.gz
+
+echo "Done."
diff --git a/data/lenet.prototxt b/data/lenet.prototxt
new file mode 100644 (file)
index 0000000..085ed43
--- /dev/null
@@ -0,0 +1,122 @@
+name: "LeNet"
+layers {
+  layer {
+    name: "mnist"
+    type: "data"
+    source: "data/mnist-train-leveldb"
+    batchsize: 64
+    scale: 0.00390625
+  }
+  top: "data"
+  top: "label"
+}
+layers {
+  layer {
+    name: "conv1"
+    type: "conv"
+    num_output: 20
+    kernelsize: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+  }
+  bottom: "data"
+  top: "conv1"
+}
+layers {
+  layer {
+    name: "pool1"
+    type: "pool"
+    kernelsize: 2
+    stride: 2
+    pool: MAX
+  }
+  bottom: "conv1"
+  top: "pool1"
+}
+layers {
+  layer {
+    name: "conv2"
+    type: "conv"
+    num_output: 50
+    kernelsize: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+  }
+  bottom: "pool1"
+  top: "conv2"
+}
+layers {
+  layer {
+    name: "pool2"
+    type: "pool"
+    kernelsize: 2
+    stride: 2
+    pool: MAX
+  }
+  bottom: "conv2"
+  top: "pool2"
+}
+layers {
+  layer {
+    name: "ip1"
+    type: "innerproduct"
+    num_output: 500
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+  }
+  bottom: "pool2"
+  top: "ip1"
+}
+layers {
+  layer {
+    name: "relu1"
+    type: "relu"
+  }
+  bottom: "ip1"
+  top: "ip1"
+}
+layers {
+  layer {
+    name: "ip2"
+    type: "innerproduct"
+    num_output: 10
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+  }
+  bottom: "ip1"
+  top: "ip2"
+}
+layers {
+  layer {
+    name: "prob"
+    type: "softmax_loss"
+  }
+  bottom: "ip2"
+  bottom: "label"
+}
diff --git a/data/lenet_test.prototxt b/data/lenet_test.prototxt
new file mode 100644 (file)
index 0000000..fdda4a6
--- /dev/null
@@ -0,0 +1,123 @@
+name: "LeNet-test"
+layers {
+  layer {
+    name: "mnist"
+    type: "data"
+    source: "data/mnist-test-leveldb"
+    batchsize: 100
+    scale: 0.00390625
+  }
+  top: "data"
+  top: "label"
+}
+layers {
+  layer {
+    name: "conv1"
+    type: "conv"
+    num_output: 20
+    kernelsize: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+  bottom: "data"
+  top: "conv1"
+}
+layers {
+  layer {
+    name: "pool1"
+    type: "pool"
+    kernelsize: 2
+    stride: 2
+    pool: MAX
+  }
+  bottom: "conv1"
+  top: "pool1"
+}
+layers {
+  layer {
+    name: "conv2"
+    type: "conv"
+    num_output: 50
+    kernelsize: 5
+    stride: 1
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+  bottom: "pool1"
+  top: "conv2"
+}
+layers {
+  layer {
+    name: "pool2"
+    type: "pool"
+    kernelsize: 2
+    stride: 2
+    pool: MAX
+  }
+  bottom: "conv2"
+  top: "pool2"
+}
+layers {
+  layer {
+    name: "ip1"
+    type: "innerproduct"
+    num_output: 500
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+  bottom: "pool2"
+  top: "ip1"
+}
+layers {
+  layer {
+    name: "relu1"
+    type: "relu"
+  }
+  bottom: "ip1"
+  top: "ip1"
+}
+layers {
+  layer {
+    name: "ip2"
+    type: "innerproduct"
+    num_output: 10
+    weight_filler {
+      type: "xavier"
+    }
+    bias_filler {
+      type: "constant"
+    }
+  }
+  bottom: "ip1"
+  top: "ip2"
+}
+layers {
+  layer {
+    name: "prob"
+    type: "softmax"
+  }
+  bottom: "ip2"
+  top: "prob"
+}
+layers {
+  layer {
+    name: "accuracy"
+    type: "accuracy"
+  }
+  bottom: "prob"
+  bottom: "label"
+  top: "accuracy"
+}
index bce3dc4faada74771c938ce9303dcaad901c3e9b..4f94f5d8f459f9b4e0bca1e10d878da6eab04b86 100644 (file)
@@ -1,13 +1,17 @@
 """Functions to draw a caffe NetParameter protobuffer.
 """
 
+from caffe.proto import caffe_pb2
+from google.protobuf import text_format
 import pydot
+import os
+import sys
 
 # Internal layer and blob styles.
 LAYER_STYLE = {'shape': 'record', 'fillcolor': '#6495ED',
-         'style': 'filled,bold'}
+         'style': 'filled'}
 NEURON_LAYER_STYLE = {'shape': 'record', 'fillcolor': '#90EE90',
-         'style': 'filled,bold'}
+         'style': 'filled'}
 BLOB_STYLE = {'shape': 'octagon', 'fillcolor': '#F0E68C',
         'style': 'filled'}
 
@@ -61,3 +65,14 @@ def draw_net_to_file(caffe_net, filename):
   with open(filename, 'w') as fid:
     fid.write(draw_net(caffe_net, ext))
 
+if __name__ == '__main__':
+  if len(sys.argv) != 3:
+    print 'Usage: %s input_net_proto_file output_image_file' % \
+        os.path.basename(sys.argv[0])
+  else:
+    net = caffe_pb2.NetParameter()
+    text_format.Merge(open(sys.argv[1]).read(), net)
+    print 'Drawing net to %s' % sys.argv[2]
+    draw_net_to_file(net, sys.argv[2])
+
+