diff --git a/common_plot.py b/common_plot.py
index 97e8df17ce22acf1090e0c3a3082e881dc99d3b5..28479e258f3170f47a6c5c2e1df59df768d623ec 100644 (file)
--- a/common_plot.py
+++ b/common_plot.py
accuracy = re.findall(r'Test net output #\d: top-{top_k} = (\d*.\d*)'.format(top_k=top_k), log)
if len(accuracy)==0:
accuracy = re.findall(r'Test net output #\d: loss/top-{top_k} = (\d*.\d*)'.format(top_k=top_k), log)
+ if len(accuracy)==0:
+ accuracy = re.findall(r'Test net output #\d: accuracy/top{top_k} = (\d*.\d*)'.format(top_k=top_k), log)
+ if len(accuracy)==0:
+ accuracy = re.findall(r'Test net output #\d: accuracy = (\d*.\d*)', log)
iteration = [int(i) for i in iteration]
accuracy = [float(i) for i in accuracy]
return iteration, accuracy
loss = [float(i) for i in loss]
return iteration, loss
def get_train_loss(log):
    """Extract training-loss history from a Caffe solver log.

    Args:
        log: Full text of the training log.

    Returns:
        (iterations, losses): a list of int iteration numbers taken from
        the 'Iteration N, lr = ...' lines, and a list of float loss
        values taken from the 'Train net output #k: loss = ...' lines.
        NOTE(review): the two lists are only aligned if the log reports
        exactly one train-loss line per iteration line — verify for
        multi-loss nets.
    """
    iteration = re.findall(r'Iteration (\d+), lr = ', log)
    # \d+(?:\.\d+)? — the dot is escaped (the original '.' matched ANY
    # character) and integer losses like 'loss = 5' are still accepted.
    # '#\d+' also matches two-digit output indices, which '#\d' did not.
    loss = re.findall(r'Train net output #\d+: loss = (\d+(?:\.\d+)?)', log)
    return [int(i) for i in iteration], [float(v) for v in loss]
+
def get_epochs(log, dataset_size=1281167):
    """Derive (max_iter, num_epochs) from a Caffe solver log.

    Args:
        log: Full text of the training log.
        dataset_size: Number of training images per epoch. Defaults to
            the ImageNet ILSVRC-2012 training-set size (1,281,167),
            which the original hard-coded.

    Returns:
        (max_iter, num_epochs) where num_epochs is the total number of
        images processed (max_iter * iter_size * batch_size * num_gpus)
        divided by dataset_size and rounded up.

    Raises:
        IndexError: if the log contains no 'max_iter:' or 'batch_size:'
            entry.
    """
    # Count the devices listed at startup; a log with no ' GPU n:' lines
    # (CPU or single-device run) still uses one device, not zero —
    # otherwise the epoch count would collapse to 0.
    num_gpus = max(len(re.findall(r' GPU (\d+):', log)), 1)
    max_iter = int(re.findall(r'max_iter: (\d+)', log)[0])
    # NOTE(review): this takes the FIRST batch_size in the log — confirm
    # it is the train-phase batch size and not the test-phase one.
    batch_size = int(re.findall(r'batch_size: (\d+)', log)[0])
    # iter_size is optional in the solver prototxt and defaults to 1.
    iter_sizes = re.findall(r'iter_size: (\d+)', log)
    iter_size = int(iter_sizes[0]) if iter_sizes else 1
    images_seen = max_iter * iter_size * batch_size * num_gpus
    # Round half-up via the original `int(round(x + 0.5))` idiom.
    num_epochs = int(round(images_seen / float(dataset_size) + 0.5))
    return max_iter, num_epochs
def get_net_name(log):
    """Return the network name from the first 'Solving <name>' log line."""
    names = re.findall(r"Solving (.*)\n", log)
    return names[0]
data[net_name]["loss"] = {}
data[net_name]["loss"]["loss"] = []
data[net_name]["loss"]["iteration"] = []
+ data[net_name]["train_loss"] = {}
+ data[net_name]["train_loss"]["loss"] = []
+ data[net_name]["train_loss"]["iteration"] = []
+
+ max_iter, epochs = get_epochs(log)
+ #print epochs
+ scale = float(epochs) / max_iter
+
iteration, accuracy = get_test_accuracy(log, top_k)
+ iteration = [k*scale for k in iteration]
data[net_name]["accuracy"]["iteration"].extend(iteration)
data[net_name]["accuracy"]["accuracy"].extend(accuracy)
iteration, loss = get_test_loss(log)
+ iteration = [k*scale for k in iteration]
data[net_name]["loss"]["iteration"].extend(iteration)
data[net_name]["loss"]["loss"].extend(loss)
+
+
+ iteration, loss = get_train_loss(log)
+ iteration = [k*scale for k in iteration]
+ data[net_name]["train_loss"]["iteration"].extend(iteration)
+ data[net_name]["train_loss"]["loss"].extend(loss)
+
return data
plt.legend(nets, loc='lower right')
plt.title("Top {}".format(top_k))
- plt.xlabel("Iteration")
+ plt.xlabel("Epochs")
plt.ylabel("Accuracy [%]")
plt.ylim(0,100)
plt.grid()
plt.xlim(0)
plt.grid()
return plt
+
def plot_train_loss(data, value_at_hover=False):
    """Scatter-plot training loss against iteration for every net in *data*.

    Args:
        data: Mapping of net name -> {"train_loss": {"iteration": [...],
            "loss": [...]}} as built by the log-parsing code in this file.
        value_at_hover: When True, attach a FollowDotCursor so hovering
            shows the value of the nearest point.

    Returns:
        The matplotlib.pyplot module, ready for show()/savefig().
    """
    net_names = data.keys()
    palette = iter(cm.rainbow(np.linspace(0, 1, len(net_names))))
    figure = plt.figure()
    axes = figure.add_subplot(111)
    for name in net_names:
        # Sort point pairs by iteration so the scatter reads left-to-right.
        pairs = sorted(zip(data[name]["train_loss"]["iteration"],
                           data[name]["train_loss"]["loss"]))
        xs, ys = (list(column) for column in zip(*pairs))
        axes.scatter(xs, ys, color=next(palette))
        if value_at_hover:
            cursor = FollowDotCursor(axes, xs, ys)

    plt.legend(net_names, loc='upper right')
    plt.title("Log Loss")
    plt.xlabel("Iteration")
    plt.ylabel("Log Loss")
    plt.xlim(0)
    plt.grid()
    return plt
+