Classic Neural Network Architecture Reference v1.0 (1): https://developer.aliyun.com/article/1489283
Blocks #6, #7, #8:
digraph AlexNetL678 {
    rankdir=BT
    node [
        style=filled,
        color=Black,
        fontcolor=White,
        fillcolor="#30638e",
        fontname="SimHei",
        fontsize=32,
        width=5,
        height=2,
        shape="box",
    ]
    featmap52 [label="[BatchSize,\nW=6, H=6, C=128x2]", shape="Mrecord"]
    reshape [label="reshape([6x6x256])"]
    linear6 [label="Linear6\n[6x6x256, 4096]"]
    relu6 [label="Relu"]
    dropout6 [label="Dropout"]
    linear7 [label="Linear7\n[4096, 4096]"]
    relu7 [label="Relu"]
    dropout7 [label="Dropout"]
    linear8 [label="Linear8\n[4096, 1000]"]
    softmax [label="Softmax"]
    oup [label="输出\n[BatchSize, 1000]", shape="Mrecord"]
    featmap52 -> reshape -> linear6 -> relu6 -> dropout6 -> linear7
        -> relu7 -> dropout7 -> linear8 -> softmax -> oup
}
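As a cross-check of the diagram, here is a minimal PyTorch sketch of the fully connected head in blocks #6, #7, #8. It is an illustration of the diagram, not code from the original AlexNet implementation; the flattened input is 6x6x256 = 9216 features, and in training code the final Softmax is usually folded into the loss instead of being a separate layer.

import torch
from torch import nn

# Fully connected head for blocks #6, #7, #8; the input is the [BatchSize, 256, 6, 6]
# feature map from the convolutional part (C=128x2 merges the two GPU branches).
alexnet_head = nn.Sequential(
    nn.Flatten(),                  # reshape([6x6x256]) -> [BatchSize, 9216]
    nn.Linear(256 * 6 * 6, 4096),  # Linear6
    nn.ReLU(),
    nn.Dropout(p=0.5),
    nn.Linear(4096, 4096),         # Linear7
    nn.ReLU(),
    nn.Dropout(p=0.5),
    nn.Linear(4096, 1000),         # Linear8
    nn.Softmax(dim=1),
)

x = torch.randn(2, 256, 6, 6)      # PyTorch uses NCHW ordering
print(alexnet_head(x).shape)       # torch.Size([2, 1000])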
3.3 VGG16
Main body:
digraph VGG16 {
    rankdir=BT
    node [
        style=filled,
        color=Black,
        fontcolor=White,
        fillcolor="#30638e",
        fontname="SimHei",
        fontsize=32,
        width=5,
        height=2,
        shape="box",
    ]
    inp [label="输入\n[BatchSize,\nW=224, H=224, C=3]", shape="Mrecord"]
    convblock1 [label="ConvBlock1\n[In=3, Out=64]"]
    featmap1 [label="[BatchSize,\nW=112, H=112, C=64]", shape="Mrecord"]
    convblock2 [label="ConvBlock2\n[In=64, Out=128]"]
    featmap2 [label="[BatchSize,\nW=56, H=56, C=128]", shape="Mrecord"]
    convblock3 [label="ConvBlock3\n[In=128, Out=256]"]
    featmap3 [label="[BatchSize,\nW=28, H=28, C=256]", shape="Mrecord"]
    convblock4 [label="ConvBlock4\n[In=256, Out=512]"]
    featmap4 [label="[BatchSize,\nW=14, H=14, C=512]", shape="Mrecord"]
    convblock5 [label="ConvBlock5\n[In=512, Out=512]"]
    featmap5 [label="[BatchSize,\nW=7, H=7, C=512]", shape="Mrecord"]
    reshape [label="reshape([7x7x512])"]
    linear1 [label="Linear1\n[7x7x512, 4096]"]
    relu1 [label="Relu"]
    linear2 [label="Linear2\n[4096, 4096]"]
    relu2 [label="Relu"]
    linear3 [label="Linear3\n[4096, 1000]"]
    softmax [label="Softmax"]
    oup [label="输出\n[BatchSize, 1000]", shape="Mrecord"]
    inp -> convblock1 -> featmap1 -> convblock2 -> featmap2
        -> convblock3 -> featmap3 -> convblock4 -> featmap4
        -> convblock5 -> featmap5 -> reshape -> linear1 -> relu1
        -> linear2 -> relu2 -> linear3 -> softmax -> oup
}
Convolution block:
digraph VGG16ConvBlock {
    rankdir=BT
    node [
        style=filled,
        color=Black,
        fontcolor=White,
        fillcolor="#30638e",
        fontname="SimHei",
        fontsize=32,
        width=5,
        height=2,
        shape="box",
    ]
    inp [label="输入\n[BatchSize, W=BlockW,\n H=BlockH, C=BlockIn]", shape="Mrecord"]
    conv1 [label="Conv2D#1\n[K=3, P=1,\n In=BlockIn,\n Out=BlockOut]"]
    relu1 [label="Relu"]
    conv2 [label="Conv2D#2\n[K=3, P=1,\n In=BlockOut,\n Out=BlockOut]"]
    relu2 [label="Relu"]
    maxpool [label="MaxPool2D\n[K=2, S=2]"]
    oup [label="输出\n[BatchSize,\n W=BlockW/2,\n H=BlockH/2,\n C=BlockOut]", shape="Mrecord"]
    inp -> conv1 -> relu1 -> conv2 -> relu2 -> maxpool -> oup
}
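The two VGG16 diagrams above translate almost directly into code. The following is a minimal PyTorch sketch under the diagram's simplification that every stage has two convolutions (the original VGG16 uses three in each of the last three stages); it is an illustration, not the article's reference implementation.

import torch
from torch import nn

def conv_block(c_in: int, c_out: int) -> nn.Sequential:
    """VGG-style block: two 3x3 convolutions, each followed by ReLU, then 2x2 max pooling.
    The original VGG16 uses three convolutions in each of its last three stages;
    the diagram above simplifies every stage to two."""
    return nn.Sequential(
        nn.Conv2d(c_in, c_out, kernel_size=3, padding=1),
        nn.ReLU(),
        nn.Conv2d(c_out, c_out, kernel_size=3, padding=1),
        nn.ReLU(),
        nn.MaxPool2d(kernel_size=2, stride=2),   # halves W and H
    )

vgg16_like = nn.Sequential(
    conv_block(3, 64),               # ConvBlock1: 224 -> 112
    conv_block(64, 128),             # ConvBlock2: 112 -> 56
    conv_block(128, 256),            # ConvBlock3: 56 -> 28
    conv_block(256, 512),            # ConvBlock4: 28 -> 14
    conv_block(512, 512),            # ConvBlock5: 14 -> 7
    nn.Flatten(),                    # reshape([7x7x512])
    nn.Linear(7 * 7 * 512, 4096),    # Linear1
    nn.ReLU(),
    nn.Linear(4096, 4096),           # Linear2
    nn.ReLU(),
    nn.Linear(4096, 1000),           # Linear3
    nn.Softmax(dim=1),               # usually folded into the loss during training
)

x = torch.randn(2, 3, 224, 224)      # [BatchSize=2, C=3, H=224, W=224]
print(vgg16_like(x).shape)           # torch.Size([2, 1000])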
3.4 ResNet18
Main body:
digraph ResNet18 {
    rankdir=BT
    node [
        style=filled,
        color=Black,
        fontcolor=White,
        fillcolor="#30638e",
        fontname="SimHei",
        fontsize=32,
        width=5,
        height=2,
        shape="box",
    ]
    inp [label="输入\n[BatchSize,\nW=224, H=224, C=3]", shape="Mrecord"]
    conv0 [label="Conv2D#0\n[K=7, S=2, P=3, In=3, Out=64]"]
    maxpool0 [label="MaxPool2D#0\n[K=3, S=2, P=1]"]
    featmap0 [label="[BatchSize,\nW=56, H=56, C=64]", shape="Mrecord"]
    convblock1 [label="ConvBlock1\n[C=64]"]
    convblock2 [label="ConvBlock2\n[C=64]"]
    featmap2 [label="[BatchSize,\nW=56, H=56, C=64]", shape="Mrecord"]
    downconvblock3 [label="DownConvBlock3\n[In=64, Out=128]"]
    convblock4 [label="ConvBlock4\n[C=128]"]
    featmap4 [label="[BatchSize,\nW=28, H=28, C=128]", shape="Mrecord"]
    downconvblock5 [label="DownConvBlock5\n[In=128, Out=256]"]
    convblock6 [label="ConvBlock6\n[C=256]"]
    featmap6 [label="[BatchSize,\nW=14, H=14, C=256]", shape="Mrecord"]
    downconvblock7 [label="DownConvBlock7\n[In=256, Out=512]"]
    convblock8 [label="ConvBlock8\n[C=512]"]
    featmap8 [label="[BatchSize,\nW=7, H=7, C=512]", shape="Mrecord"]
    avgpool [label="AvgPool2D\n[K=7, S=7]"]
    featmap9 [label="[BatchSize,\nW=1, H=1, C=512]", shape="Mrecord"]
    reshape [label="reshape([512])"]
    linear [label="Linear\n[512, 1000]"]
    softmax [label="Softmax"]
    oup [label="输出\n[BatchSize, 1000]", shape="Mrecord"]
    inp -> conv0 -> maxpool0 -> featmap0 -> convblock1 -> convblock2
        -> featmap2 -> downconvblock3 -> convblock4 -> featmap4
        -> downconvblock5 -> convblock6 -> featmap6 -> downconvblock7
        -> convblock8 -> featmap8 -> avgpool -> featmap9 -> reshape
        -> linear -> softmax -> oup
}
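The feature-map sizes annotated in the diagram follow from the usual output-size formula out = floor((in + 2P - K) / S) + 1. A small Python check, assuming each DownConvBlock halves W and H with a stride-2, 3x3, padding-1 convolution as in the standard ResNet18 (its exact parameters are not shown in this section):

def out_size(in_size: int, k: int, s: int = 1, p: int = 0) -> int:
    """Spatial output size of a convolution or pooling layer."""
    return (in_size + 2 * p - k) // s + 1

w = 224
w = out_size(w, k=7, s=2, p=3)   # Conv2D#0        -> 112
w = out_size(w, k=3, s=2, p=1)   # MaxPool2D#0     -> 56  (featmap0, featmap2)
w = out_size(w, k=3, s=2, p=1)   # DownConvBlock3  -> 28  (featmap4), assumed stride-2 3x3 conv
w = out_size(w, k=3, s=2, p=1)   # DownConvBlock5  -> 14  (featmap6)
w = out_size(w, k=3, s=2, p=1)   # DownConvBlock7  -> 7   (featmap8)
w = out_size(w, k=7, s=7)        # AvgPool2D       -> 1   (featmap9)
print(w)                         # 1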
Convolution block:
digraph ResNet18ConvBlock {
    rankdir=BT
    node [
        style=filled,
        color=Black,
        fontcolor=White,
        fillcolor="#30638e",
        fontname="SimHei",
        fontsize=32,
        width=5,
        height=2,
        shape="box",
    ]
    inp [label="输入\n[BatchSize,\nW=BlockW, \nH=BlockH, C=BlockC]", shape="Mrecord"]
    conv1 [label="Conv2D#1\n[K=3, P=1,\n In=BlockC,\n Out=BlockC]"]
    relu1 [label="Relu"]
    conv2 [label="Conv2D#2\n[K=3, P=1,\n In=BlockC,\n Out=BlockC]"]
    add [label="+"]
    relu2 [label="Relu"]
    oup [label="输出\n[BatchSize,\nW=BlockW, \nH=BlockH, C=BlockC]", shape="Mrecord"]
    inp -> conv1 -> relu1 -> conv2 -> add -> relu2 -> oup
    inp -> add
}
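A minimal PyTorch sketch of the identity residual block above. The diagram omits the BatchNorm layers that the original ResNet18 places after each convolution, and so does the sketch; the downsampling DownConvBlock variant, which also projects the skip connection to the new channel count, is not covered here.

import torch
from torch import nn

class BasicBlock(nn.Module):
    """Identity residual block from the diagram: two 3x3 convolutions plus a skip connection.
    (The original ResNet18 also applies BatchNorm after each convolution; omitted here.)"""

    def __init__(self, channels: int):
        super().__init__()
        self.conv1 = nn.Conv2d(channels, channels, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(channels, channels, kernel_size=3, padding=1)
        self.relu = nn.ReLU()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        out = self.relu(self.conv1(x))   # Conv2D#1 -> Relu
        out = self.conv2(out)            # Conv2D#2
        out = out + x                    # "+": skip connection from the block input
        return self.relu(out)            # final Relu

x = torch.randn(2, 64, 56, 56)           # [BatchSize=2, C=64, H=56, W=56]
print(BasicBlock(64)(x).shape)           # torch.Size([2, 64, 56, 56]), shape preserved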
Classic Neural Network Architecture Reference v1.0 (3): https://developer.aliyun.com/article/1489285