matplotlib.pyplot 画图实战
如果你想将神经网络的训练结果清晰地呈现出来,不妨看看这篇文章!
技术要点:
1.一张figure呈现一个横坐标对应多个纵坐标的曲线图
2.设置标题/横纵坐标/图例
3.绘制一个点/垂直于X轴的直线
样例一
import re
import matplotlib.pyplot as plt

# Raw training-log lines, one per epoch, in the form:
#   "<epoch> loss: tensor(<value>, device='cuda:0')"
strings = ["0 loss: tensor(0.0028, device='cuda:0')",
"1 loss: tensor(0.0020, device='cuda:0')",
"2 loss: tensor(0.0016, device='cuda:0')",
"3 loss: tensor(0.0013, device='cuda:0')",
"4 loss: tensor(0.0011, device='cuda:0')",
"5 loss: tensor(0.0011, device='cuda:0')",
"6 loss: tensor(0.0010, device='cuda:0')",
"7 loss: tensor(0.0010, device='cuda:0')",
"8 loss: tensor(0.0010, device='cuda:0')",
"9 loss: tensor(0.0008, device='cuda:0')",
"10 loss: tensor(0.0009, device='cuda:0')",
"11 loss: tensor(0.0009, device='cuda:0')",
"12 loss: tensor(0.0008, device='cuda:0')",
"13 loss: tensor(0.0010, device='cuda:0')",
"14 loss: tensor(0.0007, device='cuda:0')",
"15 loss: tensor(0.0007, device='cuda:0')",
"16 loss: tensor(0.0007, device='cuda:0')",
"17 loss: tensor(0.0007, device='cuda:0')",
"18 loss: tensor(0.0006, device='cuda:0')",
"19 loss: tensor(0.0006, device='cuda:0')",
"20 loss: tensor(0.0006, device='cuda:0')",
"21 loss: tensor(0.0006, device='cuda:0')",
"22 loss: tensor(0.0006, device='cuda:0')",
"23 loss: tensor(0.0006, device='cuda:0')",
"24 loss: tensor(0.0005, device='cuda:0')",
"25 loss: tensor(0.0005, device='cuda:0')",
"26 loss: tensor(0.0005, device='cuda:0')",
"27 loss: tensor(0.0005, device='cuda:0')",
"28 loss: tensor(0.0005, device='cuda:0')",
"29 loss: tensor(0.0005, device='cuda:0')",
"30 loss: tensor(0.0006, device='cuda:0')"]

# Capture the numeric loss value between "tensor(" and ", device".
regex1 = re.compile(r'loss: tensor\((.*), device')

# One epoch index per log line; deriving the count from the data avoids
# the hard-coded range(31) of the original, which silently breaks when
# lines are added or removed.
epoch = list(range(len(strings)))
loss = []
for string in strings:
    match1 = regex1.search(string)
    # group(1) is the captured value; the original's groups(0)[0] only
    # worked by accident (groups(default) substitutes the default for
    # NON-matching groups, and group 1 always matches here).
    loss.append(float(match1.group(1)))
print(epoch, loss, sep='\n')

plt.figure()
plt.plot(epoch, loss, 'r--', label='Loss')  # dashed red loss curve
# Highlight a single point of interest on the curve (epoch 29).
plt.scatter(29, loss[29], color='yellow', label='Epoch:29')
plt.title("AE")
plt.xlabel("Epoch")
plt.xlim(0, epoch[-1])
plt.legend(loc=0)  # loc=0 lets matplotlib pick the best legend position
plt.show()
样例二
import re
import matplotlib.pyplot as plt

# Raw training-log lines, one per epoch, in the form:
#   "Epoch:<n> Loss:<float> Train_acc:<float> Test_acc:<float>"
strings = ['Epoch:0 Loss:1.4710358381271362 Train_acc:0.31544 Test_acc:0.4004',
'Epoch:1 Loss:0.9353103637695312 Train_acc:0.52364 Test_acc:0.542',
'Epoch:2 Loss:1.0960962772369385 Train_acc:0.64252 Test_acc:0.6594',
'Epoch:3 Loss:0.9258540868759155 Train_acc:0.71328 Test_acc:0.6747',
'Epoch:4 Loss:0.5317944884300232 Train_acc:0.76936 Test_acc:0.6915',
'Epoch:5 Loss:0.5205800533294678 Train_acc:0.81624 Test_acc:0.7456',
'Epoch:6 Loss:0.3029361069202423 Train_acc:0.86136 Test_acc:0.7808',
'Epoch:7 Loss:0.29799553751945496 Train_acc:0.9004 Test_acc:0.7831',
'Epoch:8 Loss:0.18622468411922455 Train_acc:0.93372 Test_acc:0.7966',
'Epoch:9 Loss:0.11882748454809189 Train_acc:0.95552 Test_acc:0.7936',
'Epoch:10 Loss:0.20242604613304138 Train_acc:0.96818 Test_acc:0.789',
'Epoch:11 Loss:0.12406215816736221 Train_acc:0.97592 Test_acc:0.7479']

# One compiled pattern per field; each captures the text between its
# label and the next label (or end of line for Test_acc).
regex1 = re.compile(r'Epoch:(.*) Loss:')
regex2 = re.compile(r'Loss:(.*) Train_acc:')
regex3 = re.compile(r'Train_acc:(.*) Test_acc:')
regex4 = re.compile(r'Test_acc:(.*)')

epoch = []
loss = []
train_acc = []
test_acc = []
for string in strings:
    match1 = regex1.search(string)
    match2 = regex2.search(string)
    match3 = regex3.search(string)
    match4 = regex4.search(string)
    # group(1) is the captured value; the original's groups(0)[0] only
    # worked by accident (groups(default) substitutes the default for
    # NON-matching groups, and group 1 always matches here).
    epoch.append(int(match1.group(1)))
    loss.append(float(match2.group(1)))
    train_acc.append(float(match3.group(1)))
    test_acc.append(float(match4.group(1)))
print(epoch, loss, train_acc, test_acc, sep='\n')

# Three curves share the same x axis (epoch).
plt.figure()
plt.plot(epoch, loss, 'r--', label='Loss')
plt.plot(epoch, train_acc, 'b--', label='Train_acc')
plt.plot(epoch, test_acc, 'y--', label='Test_acc')
# Vertical marker at the epoch with the best test accuracy (epoch 8).
plt.axvline(8, color='orchid', linestyle='-', label='Epoch:8')
plt.title("ResNet18")
plt.xlabel("Epoch")
plt.xlim(0, epoch[-1])
plt.legend(loc=0)  # loc=0 lets matplotlib pick the best legend position
plt.show()
参考文章如下:
python3.x plt颜色和形状随笔
https://zhuanlan.zhihu.com/p/35270196
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)