# 经典神经网络架构参考 v1.0

### 一、线性模型

#### 1.1 线性回归

// Linear regression: a single fully-connected layer mapping
// NFeature inputs to one scalar output per sample.
// Fixed: label typo "BatchSise" -> "BatchSize" on the output node.
digraph LinearRegression {
rankdir=BT
node [
style=filled,
color=Black,
fontcolor=White,
fillcolor="#30638e",
fontname="SimHei",
fontsize=32,
width=5, height=2,
shape="box",
]

inp [label="输入\n[BatchSize, NFeature]", shape="Mrecord"]
ll  [label="Linear\n[NFeature, 1]"]
oup [label="输出\n[BatchSize, 1]", shape="Mrecord"]

inp -> ll -> oup
}

#### 1.2 逻辑回归

// Logistic regression: a single linear layer producing one logit per
// sample, squashed to a probability by Sigmoid.
// Fixed: the original block was a verbatim copy of the Softmax-regression
// graph in section 1.3 (same graph name, Softmax node, NLabel output),
// which did not match this section's "逻辑回归" heading.
digraph LogisticRegression {
rankdir=BT
node [
style=filled,
color=Black,
fontcolor=White,
fillcolor="#30638e",
fontname="SimHei",
fontsize=32,
width=5, height=2,
shape="box",
]

inp     [label="输入\n[BatchSize, NFeature]", shape="Mrecord"]
ll      [label="Linear\n[NFeature, 1]"]
sigmoid [label="Sigmoid"]
oup     [label="输出\n[BatchSize, 1]", shape="Mrecord"]

inp -> ll -> sigmoid -> oup
}

#### 1.3 Softmax 回归

// Softmax regression (multinomial logistic regression): one linear
// layer mapping NFeature inputs to NLabel logits, normalized by Softmax.
// Fixed: label typo "BatchSise" -> "BatchSize" on the output node.
digraph SoftmaxRegression {
rankdir=BT
node [
style=filled,
color=Black,
fontcolor=White,
fillcolor="#30638e",
fontname="SimHei",
fontsize=32,
width=5, height=2,
shape="box",
]

inp     [label="输入\n[BatchSize, NFeature]", shape="Mrecord"]
ll      [label="Linear\n[NFeature, NLabel]"]
softmax [label="Softmax"]
oup     [label="输出\n[BatchSize, NLabel]", shape="Mrecord"]

inp -> ll -> softmax -> oup
}

### 二、MLP

// Three-layer MLP: 768 -> 512 -> 256 -> 10, with ReLU after each hidden
// linear layer and Softmax over the 10 output classes.
// Fixed: label typo "BatchSise" -> "BatchSize" on the output node.
digraph MLP {
rankdir=BT
node [
style=filled,
color=Black,
fontcolor=White,
fillcolor="#30638e",
fontname="SimHei",
fontsize=32,
width=5, height=2,
shape="box",
]

inp     [label="输入\n[BatchSize,\n NFeature(768)]", shape="Mrecord"]
ll1     [label="Linear\n[NFeature(768),\n NHidden1(512)]"]
relu1   [label="Relu"]
ll2     [label="Linear\n[NHidden1(512),\n NHidden2(256)]"]
relu2   [label="Relu"]
ll3     [label="Linear\n[NHidden2(256),\n NLabels(10)]"]
softmax [label="Softmax"]
oup     [label="输出\n[BatchSize,\n NLabels(10)]", shape="Mrecord"]

inp -> ll1 -> relu1 -> ll2 -> relu2
-> ll3 -> softmax -> oup
}

### 三、卷积神经网络

#### 3.1 LeNet

// LeNet-5: two conv+pool stages (32x32x1 -> 5x5x16 feature maps),
// flattened into three fully-connected layers (120 -> 84 -> 10).
// Fixed: label typo "BatchSise" -> "BatchSize" on the output node;
// graph renamed "Lenet" -> "LeNet" to match the section heading.
digraph LeNet {
rankdir=BT
node [
style=filled,
color=Black,
fontcolor=White,
fillcolor="#30638e",
fontname="SimHei",
fontsize=32,
width=5, height=2,
shape="box",
]

// Convolutional stage 1: 32x32x1 -> 28x28x6 -> pooled to 14x14x6.
inp [label="输入\n[BatchSize,\n W=32, H=32, C=1]", shape="Mrecord"]
conv1 [label="Conv2D 1\n[In=1, Out=6, K=5]"]
relu1 [label="Relu"]
featmap11 [label="[BatchSize,\n W=28, H=28, C=6]", shape="Mrecord"]
pool1 [label="MaxPool2D 1\n[K=2, S=2]"]
featmap12 [label="[BatchSize,\n W=14, H=14, C=6]", shape="Mrecord"]
// Convolutional stage 2: 14x14x6 -> 10x10x16 -> pooled to 5x5x16.
conv2 [label="Conv2D 2\n[In=6, Out=16, K=5]"]
relu2 [label="Relu"]
featmap21 [label="[BatchSize,\n W=10, H=10, C=16]", shape="Mrecord"]
pool2 [label="MaxPool2D 2\n[K=2, S=2]"]
featmap22 [label="[BatchSize,\n W=5, H=5, C=16]", shape="Mrecord"]
// Classifier head: flatten then 16x5x5 -> 120 -> 84 -> 10 classes.
reshape [label="reshape\n[BatchSize, 16x5x5]"]
ll1 [label="Linear1\n[16x5x5, 120]"]
relu3 [label="Relu"]
ll2 [label="Linear2\n[120, 84]"]
relu4 [label="Relu"]
ll3  [label="Linear3\n[84, NLabel(10)]"]
softmax [label="Softmax"]
oup [label="输出\n[BatchSize,\n NLabel(10)]", shape="Mrecord"]

inp -> conv1 -> relu1 -> featmap11 -> pool1 -> featmap12 ->
conv2 -> relu2 -> featmap21 -> pool2 -> featmap22 ->
reshape -> ll1 -> relu3 -> ll2 -> relu4 -> ll3 ->
softmax -> oup

}

#### 3.2 AlexNet

// AlexNet, stage 1: 227x227x3 input -> 11x11 stride-4 conv (2x48
// channels, one group per GPU in the original paper) -> 3x3 stride-2
// max-pool, ending at 27x27x(48x2).
digraph AlexNetL1 {
    rankdir=BT;
    node [style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
          fontname="SimHei", fontsize=32, width=5, height=2, shape="box"];

    // Stage nodes (Mrecord boxes carry tensor shapes).
    inp       [label="输入\n[BatchSize,\n W=227, H=227, C=3]", shape="Mrecord"];
    conv1     [label="Conv2D#1\n[K=11, S=4,\n In=3, Out=48x2]"];
    relu1     [label="Relu"];
    featmap11 [label="[BatchSize,\nW=55, H=55, C=48x2]", shape="Mrecord"];
    maxpool1  [label="MaxPool2D#1\n[K=3, S=2]"];
    featmap12 [label="[BatchSize,\nW=27, H=27, C=48x2]", shape="Mrecord"];

    // Data flow, bottom to top (rankdir=BT).
    inp -> conv1 -> relu1 -> featmap11 -> maxpool1 -> featmap12;
}

// AlexNet, stage 2: 27x27x(48x2) -> 5x5 pad-2 conv to 128x2 channels
// (spatial size preserved) -> 3x3 stride-2 max-pool down to 13x13.
digraph AlexNetL2 {
    rankdir=BT;
    node [style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
          fontname="SimHei", fontsize=32, width=5, height=2, shape="box"];

    // Stage nodes (Mrecord boxes carry tensor shapes).
    featmap12 [label="[BatchSize,\nW=27, H=27, C=48x2]", shape="Mrecord"];
    conv2     [label="Conv2D#2\n[K=5, P=2,\n In=48x2, Out=128x2]"];
    relu2     [label="Relu"];
    featmap21 [label="[BatchSize,\nW=27, H=27, C=128x2]", shape="Mrecord"];
    maxpool2  [label="MaxPool2D#2\n[K=3, S=2]"];
    featmap22 [label="[BatchSize,\nW=13, H=13, C=128x2]", shape="Mrecord"];

    // Data flow, bottom to top (rankdir=BT).
    featmap12 -> conv2 -> relu2 -> featmap21 -> maxpool2 -> featmap22;
}

// AlexNet, stages 3-4: two back-to-back 3x3 pad-1 convolutions at
// 13x13 resolution (128x2 -> 192x2 -> 192x2 channels), no pooling.
digraph AlexNetL34 {
    rankdir=BT;
    node [style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
          fontname="SimHei", fontsize=32, width=5, height=2, shape="box"];

    // Stage nodes (Mrecord boxes carry tensor shapes).
    featmap22 [label="[BatchSize,\nW=13, H=13, C=128x2]", shape="Mrecord"];
    conv3     [label="Conv2D#3\n[K=3, P=1,\n In=128x2, Out=192x2]"];
    relu3     [label="Relu"];
    featmap3  [label="[BatchSize,\nW=13, H=13, C=192x2]", shape="Mrecord"];
    conv4     [label="Conv2D#4\n[K=3, P=1,\n In=192x2, Out=192x2]"];
    relu4     [label="Relu"];
    featmap4  [label="[BatchSize,\nW=13, H=13, C=192x2]", shape="Mrecord"];

    // Data flow, bottom to top (rankdir=BT).
    featmap22 -> conv3 -> relu3 -> featmap3 -> conv4 -> relu4 -> featmap4;
}

// AlexNet, stage 5: 3x3 pad-1 conv (192x2 -> 128x2 channels) at 13x13,
// then 3x3 stride-2 max-pool down to 6x6x(128x2) before the FC head.
digraph AlexNetL5 {
    rankdir=BT;
    node [style=filled, color=Black, fontcolor=White, fillcolor="#30638e",
          fontname="SimHei", fontsize=32, width=5, height=2, shape="box"];

    // Stage nodes (Mrecord boxes carry tensor shapes).
    featmap4  [label="[BatchSize,\nW=13, H=13, C=192x2]", shape="Mrecord"];
    conv5     [label="Conv2D#5\n[K=3, P=1,\n In=192x2, Out=128x2]"];
    relu5     [label="Relu"];
    featmap51 [label="[BatchSize,\nW=13, H=13, C=128x2]", shape="Mrecord"];
    maxpool5  [label="MaxPool2D#5\n[K=3, S=2]"];
    featmap52 [label="[BatchSize,\nW=6, H=6, C=128x2]", shape="Mrecord"];

    // Data flow, bottom to top (rankdir=BT).
    featmap4 -> conv5 -> relu5 -> featmap51 -> maxpool5 -> featmap52;
}

|
1天前
|

9 1
|
24天前
|

【计算巢】网络模拟工具：设计与测试网络架构的有效方法
【6月更文挑战第1天】成为网络世界的超级英雄，利用网络模拟工具解决复杂架构难题！此工具提供安全的虚拟环境，允许自由设计和测试网络拓扑，进行性能挑战和压力测试。简单示例代码展示了创建网络拓扑的便捷性，它是网络设计和故障排查的“魔法棒”。无论新手还是专家，都能借助它探索网络的无限可能，开启精彩冒险！快行动起来，你会发现网络世界前所未有的乐趣！
26 2
|
4天前
|

26 2
|
7天前
|

VGG深度卷积神经网络架构
VGG深度卷积神经网络架构
14 3
|
19天前

12 1
|
1月前
|

35 4
|
25天前
|

【5月更文挑战第31天】 在现代软件开发领域，微服务架构已经成为实现灵活、可扩展及容错系统的重要解决方案。本文将深入探讨构建高效微服务架构的五个核心策略：服务划分原则、API网关设计、服务发现与注册、熔断机制以及持续集成与部署。这些策略不仅有助于开发团队提升系统的可维护性和可伸缩性，同时也确保了高可用性和服务质量。通过实践案例和性能分析，我们将展示如何有效应用这些策略以提高微服务的性能和稳定性。
23 0
|
1月前
|

30 1
|
1月前
|

【4月更文挑战第30天】在信息技术高速发展的今天，构建坚不可摧的数字堡垒已成为个人、企业乃至国家安全的重要组成部分。本文深入探讨网络安全漏洞的本质、加密技术的进展以及提升安全意识的必要性，旨在为读者提供全面的网络安全与信息安全知识框架。通过对网络攻防技术的解析和案例研究，我们揭示了防御策略的关键点，并强调了持续教育在塑造安全文化中的作用。
29 0
|
1月前
|

【专栏】30 道初级网络工程师面试题为广大网络工程师提供参考。
【4月更文挑战第28天】本文为初级网络工程师提供了30道面试题，涵盖OSI七层模型、TCP/IP协议栈、IP地址分类、ARP、VLAN、STP、DHCP、DNS、防火墙、NAT、VPN、网络拓扑、广域网、以太网、网络存储、网络拥塞、流量监控、延迟、网络安全、网络攻击防范、协议分析、性能优化、故障排查、网络虚拟化和云计算等基础知识。这些问题旨在帮助面试者准备并提升网络工程领域的知识和技能。
162 0