一、线性模型
1.1 线性回归
// Linear regression: one linear layer mapping NFeature inputs to a single scalar output.
digraph LinearRegression {
    rankdir=BT
    // Shared style for all nodes; data/tensor nodes override shape to Mrecord below.
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    inp [label="输入\n[BatchSize, NFeature]", shape="Mrecord"]
    ll  [label="Linear\n[NFeature, 1]"]
    // Fixed typo in the original label: "BatchSise" -> "BatchSize".
    oup [label="输出\n[BatchSize, 1]", shape="Mrecord"]
    inp -> ll -> oup
}
1.2 逻辑回归
// Logistic regression: one linear layer followed by a Sigmoid, producing a single
// probability per sample.
// NOTE(review): the original graph here was a byte-for-byte duplicate of the
// Softmax-regression graph in section 1.3, contradicting this section's heading
// ("逻辑回归" = logistic regression). Replaced with the standard logistic-regression
// topology (output dimension 1, Sigmoid activation) and renamed the graph so it no
// longer collides with section 1.3's graph name.
digraph LogisticRegression {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    inp     [label="输入\n[BatchSize, NFeature]", shape="Mrecord"]
    ll      [label="Linear\n[NFeature, 1]"]
    sigmoid [label="Sigmoid"]
    oup     [label="输出\n[BatchSize, 1]", shape="Mrecord"]
    inp -> ll -> sigmoid -> oup
}
1.3 Softmax 回归
// Softmax regression: one linear layer followed by a Softmax over NLabel classes.
digraph SoftmaxRegression {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    inp     [label="输入\n[BatchSize, NFeature]", shape="Mrecord"]
    ll      [label="Linear\n[NFeature, NLabel]"]
    softmax [label="Softmax"]
    // Fixed typo in the original label: "BatchSise" -> "BatchSize".
    oup     [label="输出\n[BatchSize, NLabel]", shape="Mrecord"]
    inp -> ll -> softmax -> oup
}
二、MLP
// Three-layer MLP classifier: 768 -> 512 -> 256 -> 10, ReLU between linear layers,
// Softmax on the output.
digraph MLP {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    inp   [label="输入\n[BatchSize,\n NFeature(768)]", shape="Mrecord"]
    ll1   [label="Linear\n[NFeature(768),\n NHidden1(512)]"]
    relu1 [label="Relu"]
    ll2   [label="Linear\n[NHidden1(512),\n NHidden2(256)]"]
    relu2 [label="Relu"]
    ll3   [label="Linear\n[NHidden2(256),\n NLabels(10)]"]
    softmax [label="Softmax"]
    // Fixed typo in the original label: "BatchSise" -> "BatchSize".
    oup   [label="输出\n[BatchSize,\n NLabels(10)]", shape="Mrecord"]
    inp -> ll1 -> relu1 -> ll2 -> relu2 -> ll3 -> softmax -> oup
}
三、卷积神经网络
3.1 LeNet
// LeNet-5: two Conv+ReLU+MaxPool stages, then three linear layers and Softmax.
// Feature-map shapes are shown after each stage (input 32x32x1 -> 5x5x16 before
// the flatten).
digraph Lenet {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    inp       [label="输入\n[BatchSize,\n W=32, H=32, C=1]", shape="Mrecord"]
    // Stage 1: 32x32x1 -> conv(K=5) -> 28x28x6 -> pool(2,2) -> 14x14x6
    conv1     [label="Conv2D 1\n[In=1, Out=6, K=5]"]
    relu1     [label="Relu"]
    featmap11 [label="[BatchSize,\n W=28, H=28, C=6]", shape="Mrecord"]
    pool1     [label="MaxPool2D 1\n[K=2, S=2]"]
    featmap12 [label="[BatchSize,\n W=14, H=14, C=6]", shape="Mrecord"]
    // Stage 2: 14x14x6 -> conv(K=5) -> 10x10x16 -> pool(2,2) -> 5x5x16
    conv2     [label="Conv2D 2\n[In=6, Out=16, K=5]"]
    relu2     [label="Relu"]
    featmap21 [label="[BatchSize,\n W=10, H=10, C=16]", shape="Mrecord"]
    pool2     [label="MaxPool2D 2\n[K=2, S=2]"]
    featmap22 [label="[BatchSize,\n W=5, H=5, C=16]", shape="Mrecord"]
    // Classifier head: flatten then 400 -> 120 -> 84 -> 10.
    reshape   [label="reshape\n[BatchSize, 16x5x5]"]
    ll1       [label="Linear1\n[16x5x5, 120]"]
    relu3     [label="Relu"]
    ll2       [label="Linear2\n[120, 84]"]
    relu4     [label="Relu"]
    ll3       [label="Linear3\n[84, NLabel(10)]"]
    softmax   [label="Softmax"]
    // Fixed typo in the original label: "BatchSise" -> "BatchSize".
    oup       [label="输出\n[BatchSize,\n NLabel(10)]", shape="Mrecord"]
    inp -> conv1 -> relu1 -> featmap11 -> pool1 -> featmap12 -> conv2 -> relu2 -> featmap21 -> pool2 -> featmap22 -> reshape -> ll1 -> relu3 -> ll2 -> relu4 -> ll3 -> softmax -> oup
}
3.2 AlexNet
块 #1:
// AlexNet block #1: 227x227x3 input -> Conv(K=11, S=4) -> ReLU -> 55x55x(48x2)
// -> MaxPool(K=3, S=2) -> 27x27x(48x2). "x2" reflects the original two-GPU split.
digraph AlexNetL1 {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    inp       [label="输入\n[BatchSize,\n W=227, H=227, C=3]", shape="Mrecord"]
    conv1     [label="Conv2D#1\n[K=11, S=4,\n In=3, Out=48x2]"]
    relu1     [label="Relu"]
    featmap11 [label="[BatchSize,\nW=55, H=55, C=48x2]", shape="Mrecord"]
    maxpool1  [label="MaxPool2D#1\n[K=3, S=2]"]
    featmap12 [label="[BatchSize,\nW=27, H=27, C=48x2]", shape="Mrecord"]
    inp -> conv1 -> relu1 -> featmap11 -> maxpool1 -> featmap12
}
块 #2:
// AlexNet block #2: 27x27x(48x2) -> Conv(K=5, P=2) -> ReLU -> 27x27x(128x2)
// -> MaxPool(K=3, S=2) -> 13x13x(128x2).
digraph AlexNetL2 {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    featmap12 [label="[BatchSize,\nW=27, H=27, C=48x2]", shape="Mrecord"]
    conv2     [label="Conv2D#2\n[K=5, P=2,\n In=48x2, Out=128x2]"]
    relu2     [label="Relu"]
    featmap21 [label="[BatchSize,\nW=27, H=27, C=128x2]", shape="Mrecord"]
    maxpool2  [label="MaxPool2D#2\n[K=3, S=2]"]
    featmap22 [label="[BatchSize,\nW=13, H=13, C=128x2]", shape="Mrecord"]
    featmap12 -> conv2 -> relu2 -> featmap21 -> maxpool2 -> featmap22
}
块 #3 和 #4:
// AlexNet blocks #3 and #4: two Conv(K=3, P=1) + ReLU stages; spatial size stays
// 13x13, channels go 128x2 -> 192x2 -> 192x2. No pooling in these blocks.
digraph AlexNetL34 {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    featmap22 [label="[BatchSize,\nW=13, H=13, C=128x2]", shape="Mrecord"]
    conv3     [label="Conv2D#3\n[K=3, P=1,\n In=128x2, Out=192x2]"]
    relu3     [label="Relu"]
    featmap3  [label="[BatchSize,\nW=13, H=13, C=192x2]", shape="Mrecord"]
    conv4     [label="Conv2D#4\n[K=3, P=1,\n In=192x2, Out=192x2]"]
    relu4     [label="Relu"]
    featmap4  [label="[BatchSize,\nW=13, H=13, C=192x2]", shape="Mrecord"]
    featmap22 -> conv3 -> relu3 -> featmap3 -> conv4 -> relu4 -> featmap4
}
块 #5:
// AlexNet block #5: 13x13x(192x2) -> Conv(K=3, P=1) -> ReLU -> 13x13x(128x2)
// -> MaxPool(K=3, S=2) -> 6x6x(128x2), the feature map fed to the FC layers.
digraph AlexNetL5 {
    rankdir=BT
    node [
        style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
        fontname="SimHei", fontsize=32, width=5, height=2, shape="box",
    ]
    featmap4  [label="[BatchSize,\nW=13, H=13, C=192x2]", shape="Mrecord"]
    conv5     [label="Conv2D#5\n[K=3, P=1,\n In=192x2, Out=128x2]"]
    relu5     [label="Relu"]
    featmap51 [label="[BatchSize,\nW=13, H=13, C=128x2]", shape="Mrecord"]
    maxpool5  [label="MaxPool2D#5\n[K=3, S=2]"]
    featmap52 [label="[BatchSize,\nW=6, H=6, C=128x2]", shape="Mrecord"]
    featmap4 -> conv5 -> relu5 -> featmap51 -> maxpool5 -> featmap52
}
经典神经网络架构参考 v1.0(2):https://developer.aliyun.com/article/1489284