
Commit

Created using Colaboratory
sjnaj committed May 9, 2022
1 parent 92885ef commit 5adc5fc
Showing 1 changed file with 301 additions and 0 deletions.
301 changes: 301 additions & 0 deletions Basic/layersAndBlocks.ipynb
@@ -0,0 +1,301 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Untitled3.ipynb",
"provenance": [],
"collapsed_sections": [],
"authorship_tag": "ABX9TyMZFdaZxqHzr/+baEamEDGw",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/github/sjnaj/DeepLearning/blob/master/Basic/layersAndBlocks.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "zlmpgLiM8BNI",
"outputId": "4ebc37ad-592d-44dd-9710-07e7646cfeb4"
},
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"tensor([[-0.0708, 0.1940, -0.1375, -0.1126, -0.0094, -0.2533, -0.0626, 0.0696,\n",
" -0.2112, 0.1389],\n",
" [-0.0099, 0.0608, -0.0250, -0.0163, 0.0969, -0.3051, -0.1870, -0.1103,\n",
" -0.1439, 0.1661]], grad_fn=<AddmmBackward0>)"
]
},
"metadata": {},
"execution_count": 2
}
],
"source": [
"import torch\n",
"from torch import nn\n",
"from torch.nn import functional as F\n",
"\n",
"net = nn.Sequential(nn.Linear(20, 256), nn.ReLU(), nn.Linear(256, 10))\n",
"\n",
"X = torch.rand(2, 20)\n",
"net(X)\n"
]
},
{
"cell_type": "markdown",
"source": [
"自定义块"
],
"metadata": {
"id": "2kP9S3ChUmY-"
}
},
{
"cell_type": "code",
"source": [
"class MLP(nn.Module):\n",
" # 用模型参数声明层。这里,我们声明两个全连接的层\n",
" def __init__(self):\n",
" # 调用MLP的父类Module的构造函数来执行必要的初始化。\n",
" # 这样,在类实例化时也可以指定其他函数参数,例如模型参数params(稍后将介绍)\n",
" super().__init__()\n",
" self.hidden = nn.Linear(20, 256) # 隐藏层\n",
" self.out = nn.Linear(256, 10) # 输出层\n",
"\n",
" # 定义模型的前向传播,即如何根据输入X返回所需的模型输出\n",
" def forward(self, X):#在nn.Module中映射到__self__上\n",
" # 注意,这里我们使用ReLU的函数版本,其在nn.functional模块中定义。\n",
" return self.out(F.relu(self.hidden(X)))"
],
"metadata": {
"id": "c0hx3TRQR0-q"
},
"execution_count": 3,
"outputs": []
},
{
"cell_type": "code",
"source": [
"net=MLP()\n",
"net(X)"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "0sDm8w2OTu_s",
"outputId": "97d9ecbd-8951-4b0e-e42b-72293e46c0c6"
},
"execution_count": 4,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"tensor([[-0.0938, -0.3164, 0.1827, 0.0036, 0.0163, -0.1917, -0.2530, 0.0999,\n",
" 0.0322, -0.0096],\n",
" [-0.0999, -0.1062, 0.1479, -0.0119, 0.1503, 0.0283, -0.1485, 0.0617,\n",
" 0.0884, -0.1171]], grad_fn=<AddmmBackward0>)"
]
},
"metadata": {},
"execution_count": 4
}
]
},
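{
"cell_type": "markdown",
"source": [
"A quick illustrative check: calling `net(X)` goes through `nn.Module.__call__`, which dispatches to the `forward` defined above, so the two calls below should agree (assuming the `net = MLP()` instance from the previous cell)."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# net(X) invokes nn.Module.__call__, which runs our forward (plus any hooks);\n",
"# with no hooks registered, calling forward directly gives the same result.\n",
"torch.allclose(net(X), net.forward(X))"
],
"metadata": {},
"execution_count": null,
"outputs": []
},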
{
"cell_type": "markdown",
"source": [
"自定义顺序块"
],
"metadata": {
"id": "KMYB6ntWUpPk"
}
},
{
"cell_type": "code",
"source": [
"class MySequential(nn.Module):\n",
" def __init__(self, *args):\n",
" super().__init__()\n",
" for idx, module in enumerate(args):\n",
" # 这里,module是Module子类的一个实例。我们把它保存在'Module'类的成员\n",
" # 变量_modules中。module的类型是OrderedDict\n",
" self._modules[str(idx)] = module\n",
"\n",
" def forward(self, X):\n",
" # OrderedDict保证了按照成员添加的顺序遍历它们\n",
" for block in self._modules.values():\n",
" X = block(X)\n",
" return X\n"
],
"metadata": {
"id": "6DwsPggET1en"
},
"execution_count": 6,
"outputs": []
},
{
"cell_type": "code",
"source": [
"net = MySequential(nn.Linear(20, 256), nn.ReLU(), nn.Linear(256, 10))\n",
"net(X)"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Acs23gnNUF74",
"outputId": "57737a2a-3801-4635-c919-f684f97e7455"
},
"execution_count": 7,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"tensor([[-0.3066, -0.0166, -0.0799, -0.0569, 0.2381, 0.0321, 0.0371, 0.0777,\n",
" 0.0003, -0.0135],\n",
" [-0.0912, -0.0402, -0.0250, 0.0942, 0.1888, -0.0461, 0.0055, 0.1226,\n",
" 0.1062, -0.1039]], grad_fn=<AddmmBackward0>)"
]
},
"metadata": {},
"execution_count": 7
}
]
},
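{
"cell_type": "markdown",
"source": [
"Storing the blocks in `_modules` (rather than in a plain Python list) is what lets `nn.Module` register them as submodules, so their parameters are visible to `parameters()` and `state_dict()`. A minimal, illustrative check on the `net` built above:"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Each Linear layer kept in _modules is a registered submodule, so its\n",
"# weight and bias show up here (4 parameter tensors in total).\n",
"print(len(list(net.parameters())))\n",
"print(list(net.state_dict().keys()))"
],
"metadata": {},
"execution_count": null,
"outputs": []
},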
{
"cell_type": "markdown",
"source": [
"灵活的forward"
],
"metadata": {
"id": "EuvT1RU6Uvwk"
}
},
{
"cell_type": "code",
"source": [
"class FixedHiddenMLP(nn.Module):\n",
" def __init__(self):\n",
" super().__init__()\n",
" # 不计算梯度的随机权重参数。因此其在训练期间保持不变\n",
" self.rand_weight = torch.rand((20, 20), requires_grad=False)#False当作标量,否则当作变量\n",
" self.linear = nn.Linear(20, 20)\n",
"\n",
" def forward(self, X):\n",
" X = self.linear(X)\n",
" # 使用创建的常量参数以及relu和mm函数\n",
" X = F.relu(torch.mm(X, self.rand_weight) + 1)\n",
" # 复用全连接层。这相当于两个全连接层共享参数\n",
" X = self.linear(X)\n",
" # 控制流\n",
" while X.abs().sum() > 1:\n",
" X /= 2\n",
" return X.sum()"
],
"metadata": {
"id": "ZsVZO8m1UMK8"
},
"execution_count": 9,
"outputs": []
},
{
"cell_type": "code",
"source": [
"net = FixedHiddenMLP()\n",
"net(X)"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "wJlXhHm4UdD6",
"outputId": "ac65e806-e10c-4af1-85ad-a6d3d9f79030"
},
"execution_count": 10,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"tensor(-0.2742, grad_fn=<SumBackward0>)"
]
},
"metadata": {},
"execution_count": 10
}
]
},
{
"cell_type": "markdown",
"source": [
"混搭各种块"
],
"metadata": {
"id": "1-zmHy59U23S"
}
},
{
"cell_type": "code",
"source": [
"class NestMLP(nn.Module):\n",
" def __init__(self):\n",
" super().__init__()\n",
" self.net = nn.Sequential(nn.Linear(20, 64), nn.ReLU(),\n",
" nn.Linear(64, 32), nn.ReLU())\n",
" self.linear = nn.Linear(32, 16)\n",
"\n",
" def forward(self, X):\n",
" return self.linear(self.net(X))\n",
"\n",
"chimera = nn.Sequential(NestMLP(), nn.Linear(16, 20), FixedHiddenMLP())\n",
"chimera(X)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Pa7Jgy7fUg9R",
"outputId": "eae4755f-9193-469b-f11c-84755f0f24bc"
},
"execution_count": 11,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"tensor(-0.2437, grad_fn=<SumBackward0>)"
]
},
"metadata": {},
"execution_count": 11
}
]
}
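,
{
"cell_type": "markdown",
"source": [
"Printing the composite model shows how the blocks nest, and all parameters of the nested blocks are reachable from the top-level `chimera` (illustrative check):"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# The repr reflects the nesting: Sequential -> NestMLP (with its own\n",
"# Sequential and Linear) -> Linear -> FixedHiddenMLP.\n",
"print(chimera)\n",
"# Parameters of every nested block are collected at the top level.\n",
"print(sum(p.numel() for p in chimera.parameters()))"
],
"metadata": {},
"execution_count": null,
"outputs": []
}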
]
}
