{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# 1.权值初始化\n", "\n", "Q:梯度爆炸的代码演示?" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "layer:0, std:15.959932327270508\n", "layer:1, std:256.6237487792969\n", "layer:2, std:4107.24560546875\n", "layer:3, std:65576.8125\n", "layer:4, std:1045011.875\n", "layer:5, std:17110408.0\n", "layer:6, std:275461408.0\n", "layer:7, std:4402537984.0\n", "layer:8, std:71323615232.0\n", "layer:9, std:1148104736768.0\n", "layer:10, std:17911758454784.0\n", "layer:11, std:283574846619648.0\n", "layer:12, std:4480599809064960.0\n", "layer:13, std:7.196814275405414e+16\n", "layer:14, std:1.1507761512626258e+18\n", "layer:15, std:1.853110740188555e+19\n", "layer:16, std:2.9677725826641455e+20\n", "layer:17, std:4.780376223769898e+21\n", "layer:18, std:7.613223480799065e+22\n", "layer:19, std:1.2092652108825478e+24\n", "layer:20, std:1.923257075956356e+25\n", "layer:21, std:3.134467063655912e+26\n", "layer:22, std:5.014437766285408e+27\n", "layer:23, std:8.066615144249704e+28\n", "layer:24, std:1.2392661553516338e+30\n", "layer:25, std:1.9455688099759845e+31\n", "layer:26, std:3.0238180658999113e+32\n", "layer:27, std:4.950357571077011e+33\n", "layer:28, std:8.150925520353362e+34\n", "layer:29, std:1.322983152787379e+36\n", "layer:30, std:2.0786820453988485e+37\n", "layer:31, std:nan\n", "output is nan in 31 layers\n", "tensor([[ inf, -2.6817e+38, inf, ..., inf,\n", " inf, inf],\n", " [ -inf, -inf, 1.4387e+38, ..., -1.3409e+38,\n", " -1.9659e+38, -inf],\n", " [-1.5873e+37, inf, -inf, ..., inf,\n", " -inf, 1.1484e+38],\n", " ...,\n", " [ 2.7754e+38, -1.6783e+38, -1.5531e+38, ..., inf,\n", " -9.9440e+37, -2.5132e+38],\n", " [-7.7184e+37, -inf, inf, ..., -2.6505e+38,\n", " inf, inf],\n", " [ inf, inf, -inf, ..., -inf,\n", " inf, 1.7432e+38]], grad_fn=)\n" ] } ], "source": [ "import torch\n", "import random\n", "import numpy as np\n", 
"import torch.nn as nn\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "class MLP(nn.Module):\n", " def __init__(self, neural_num, layers):\n", " super(MLP, self).__init__()\n", " self.linears = nn.ModuleList([nn.Linear(neural_num, neural_num, bias=False) for i in range(layers)])\n", " self.neural_num = neural_num\n", "\n", " def forward(self, x):\n", " for (i, linear) in enumerate(self.linears):\n", " x = linear(x)\n", "\n", " print(\"layer:{}, std:{}\".format(i, x.std()))\n", " if torch.isnan(x.std()):\n", " print(\"output is nan in {} layers\".format(i))\n", " break\n", "\n", " return x\n", "\n", " def initialize(self):\n", " for m in self.modules():\n", " if isinstance(m, nn.Linear):\n", " nn.init.normal_(m.weight.data) # normal: mean=0, std=1\n", "\n", "# 100层神经网络\n", "layer_nums = 100\n", "# 每一层神经元数为256\n", "neural_nums = 256\n", "batch_size = 16\n", "\n", "net = MLP(neural_nums, layer_nums)\n", "net.initialize()\n", "\n", "inputs = torch.randn((batch_size, neural_nums)) # normal: mean=0, std=1\n", "\n", "output = net(inputs)\n", "print(output)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:更改初始权值解决不带激活函数全连接网络的梯度爆炸问题?" 
] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "layer:0, std:0.9974957704544067\n", "layer:1, std:1.0024365186691284\n", "layer:2, std:1.002745509147644\n", "layer:3, std:1.0006227493286133\n", "layer:4, std:0.9966009855270386\n", "layer:5, std:1.019859790802002\n", "layer:6, std:1.026173710823059\n", "layer:7, std:1.0250457525253296\n", "layer:8, std:1.0378952026367188\n", "layer:9, std:1.0441951751708984\n", "layer:10, std:1.0181655883789062\n", "layer:11, std:1.0074602365493774\n", "layer:12, std:0.9948930144309998\n", "layer:13, std:0.9987586140632629\n", "layer:14, std:0.9981392025947571\n", "layer:15, std:1.0045733451843262\n", "layer:16, std:1.0055204629898071\n", "layer:17, std:1.0122840404510498\n", "layer:18, std:1.0076017379760742\n", "layer:19, std:1.000280737876892\n", "layer:20, std:0.9943006038665771\n", "layer:21, std:1.012800931930542\n", "layer:22, std:1.012657642364502\n", "layer:23, std:1.018149971961975\n", "layer:24, std:0.9776086211204529\n", "layer:25, std:0.9592394828796387\n", "layer:26, std:0.9317858815193176\n", "layer:27, std:0.9534041881561279\n", "layer:28, std:0.9811319708824158\n", "layer:29, std:0.9953019022941589\n", "layer:30, std:0.9773916006088257\n", "layer:31, std:0.9655940532684326\n", "layer:32, std:0.9270440936088562\n", "layer:33, std:0.9329946637153625\n", "layer:34, std:0.9311841726303101\n", "layer:35, std:0.9354336261749268\n", "layer:36, std:0.9492132067680359\n", "layer:37, std:0.9679954648017883\n", "layer:38, std:0.9849981665611267\n", "layer:39, std:0.9982335567474365\n", "layer:40, std:0.9616852402687073\n", "layer:41, std:0.9439758658409119\n", "layer:42, std:0.9631161093711853\n", "layer:43, std:0.958673894405365\n", "layer:44, std:0.9675614237785339\n", "layer:45, std:0.9837557077407837\n", "layer:46, std:0.9867278337478638\n", "layer:47, std:0.9920817017555237\n", "layer:48, std:0.9650403261184692\n", "layer:49, 
std:0.9991624355316162\n", "layer:50, std:0.9946174025535583\n", "layer:51, std:0.9662044048309326\n", "layer:52, std:0.9827387928962708\n", "layer:53, std:0.9887880086898804\n", "layer:54, std:0.9932605624198914\n", "layer:55, std:1.0237400531768799\n", "layer:56, std:0.9702046513557434\n", "layer:57, std:1.0045380592346191\n", "layer:58, std:0.9943899512290955\n", "layer:59, std:0.9900636076927185\n", "layer:60, std:0.99446702003479\n", "layer:61, std:0.9768352508544922\n", "layer:62, std:0.9797843098640442\n", "layer:63, std:0.9951220750808716\n", "layer:64, std:0.9980446696281433\n", "layer:65, std:1.0086933374404907\n", "layer:66, std:1.0276142358779907\n", "layer:67, std:1.0429234504699707\n", "layer:68, std:1.0197855234146118\n", "layer:69, std:1.0319130420684814\n", "layer:70, std:1.0540012121200562\n", "layer:71, std:1.026781439781189\n", "layer:72, std:1.0331352949142456\n", "layer:73, std:1.0666675567626953\n", "layer:74, std:1.0413838624954224\n", "layer:75, std:1.0733673572540283\n", "layer:76, std:1.0404183864593506\n", "layer:77, std:1.0344083309173584\n", "layer:78, std:1.0022705793380737\n", "layer:79, std:0.99835205078125\n", "layer:80, std:0.9732587337493896\n", "layer:81, std:0.9777462482452393\n", "layer:82, std:0.9753198623657227\n", "layer:83, std:0.9938382506370544\n", "layer:84, std:0.9472599029541016\n", "layer:85, std:0.9511011242866516\n", "layer:86, std:0.9737769961357117\n", "layer:87, std:1.005651831626892\n", "layer:88, std:1.0043526887893677\n", "layer:89, std:0.9889539480209351\n", "layer:90, std:1.0130352973937988\n", "layer:91, std:1.0030947923660278\n", "layer:92, std:0.9993206262588501\n", "layer:93, std:1.0342745780944824\n", "layer:94, std:1.031973123550415\n", "layer:95, std:1.0413124561309814\n", "layer:96, std:1.0817031860351562\n", "layer:97, std:1.128799557685852\n", "layer:98, std:1.1617802381515503\n", "layer:99, std:1.2215303182601929\n", "tensor([[-1.0696, -1.1373, 0.5047, ..., -0.4766, 1.5904, -0.1076],\n", " [ 
0.4572, 1.6211, 1.9659, ..., -0.3558, -1.1235, 0.0979],\n", " [ 0.3908, -0.9998, -0.8680, ..., -2.4161, 0.5035, 0.2814],\n", " ...,\n", " [ 0.1876, 0.7971, -0.5918, ..., 0.5395, -0.8932, 0.1211],\n", " [-0.0102, -1.5027, -2.6860, ..., 0.6954, -0.1858, -0.8027],\n", " [-0.5871, -1.3739, -2.9027, ..., 1.6734, 0.5094, -0.9986]],\n", " grad_fn=)\n" ] } ], "source": [ "import torch\n", "import random\n", "import numpy as np\n", "import torch.nn as nn\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "class MLP(nn.Module):\n", " def __init__(self, neural_num, layers):\n", " super(MLP, self).__init__()\n", " self.linears = nn.ModuleList([nn.Linear(neural_num, neural_num, bias=False) for i in range(layers)])\n", " self.neural_num = neural_num\n", "\n", " def forward(self, x):\n", " for (i, linear) in enumerate(self.linears):\n", " x = linear(x)\n", "\n", " print(\"layer:{}, std:{}\".format(i, x.std()))\n", " if torch.isnan(x.std()):\n", " print(\"output is nan in {} layers\".format(i))\n", " break\n", "\n", " return x\n", "\n", " def initialize(self):\n", " for m in self.modules():\n", " if isinstance(m, nn.Linear):\n", " # 注意此处的std\n", " nn.init.normal_(m.weight.data, std=np.sqrt(1/self.neural_num)) # normal: mean=0, std=1\n", "\n", "# 100层神经网络\n", "layer_nums = 100\n", "# 每一层神经元数为256\n", "neural_nums = 256\n", "batch_size = 16\n", "\n", "net = MLP(neural_nums, layer_nums)\n", "net.initialize()\n", "\n", "inputs = torch.randn((batch_size, neural_nums)) # normal: mean=0, std=1\n", "\n", "output = net(inputs)\n", "print(output)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:添加tanh激活函数后的全连接网络,梯度消失代码示例" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "layer:0, std:0.6273701786994934\n", "layer:1, std:0.48910173773765564\n", "layer:2, std:0.4099564850330353\n", "layer:3, std:0.35637012124061584\n", "layer:4, std:0.32117360830307007\n", 
"layer:5, std:0.2981105148792267\n", "layer:6, std:0.27730831503868103\n", "layer:7, std:0.2589356303215027\n", "layer:8, std:0.2468511462211609\n", "layer:9, std:0.23721906542778015\n", "layer:10, std:0.22171513736248016\n", "layer:11, std:0.21079954504966736\n", "layer:12, std:0.19820132851600647\n", "layer:13, std:0.19069305062294006\n", "layer:14, std:0.18555502593517303\n", "layer:15, std:0.17953835427761078\n", "layer:16, std:0.17485806345939636\n", "layer:17, std:0.1702701896429062\n", "layer:18, std:0.16508983075618744\n", "layer:19, std:0.1591130942106247\n", "layer:20, std:0.15480300784111023\n", "layer:21, std:0.15263864398002625\n", "layer:22, std:0.148549422621727\n", "layer:23, std:0.14617665112018585\n", "layer:24, std:0.13876432180404663\n", "layer:25, std:0.13316625356674194\n", "layer:26, std:0.12660598754882812\n", "layer:27, std:0.12537942826747894\n", "layer:28, std:0.12535445392131805\n", "layer:29, std:0.12589804828166962\n", "layer:30, std:0.11994210630655289\n", "layer:31, std:0.11700887233018875\n", "layer:32, std:0.11137297749519348\n", "layer:33, std:0.11154612898826599\n", "layer:34, std:0.10991233587265015\n", "layer:35, std:0.10996390879154205\n", "layer:36, std:0.10969001054763794\n", "layer:37, std:0.10975216329097748\n", "layer:38, std:0.11063200235366821\n", "layer:39, std:0.11021336913108826\n", "layer:40, std:0.10465587675571442\n", "layer:41, std:0.10141163319349289\n", "layer:42, std:0.1026025265455246\n", "layer:43, std:0.10079070925712585\n", "layer:44, std:0.10096712410449982\n", "layer:45, std:0.10117629915475845\n", "layer:46, std:0.10145658999681473\n", "layer:47, std:0.09987485408782959\n", "layer:48, std:0.09677786380052567\n", "layer:49, std:0.099615179002285\n", "layer:50, std:0.09867013245820999\n", "layer:51, std:0.09398546814918518\n", "layer:52, std:0.09388342499732971\n", "layer:53, std:0.09352942556142807\n", "layer:54, std:0.09336657077074051\n", "layer:55, std:0.0948176234960556\n", "layer:56, 
std:0.08856320381164551\n", "layer:57, std:0.09024856984615326\n", "layer:58, std:0.088644839823246\n", "layer:59, std:0.08766943216323853\n", "layer:60, std:0.08726289123296738\n", "layer:61, std:0.08623495697975159\n", "layer:62, std:0.08549778908491135\n", "layer:63, std:0.0855521708726883\n", "layer:64, std:0.0853666365146637\n", "layer:65, std:0.08462794870138168\n", "layer:66, std:0.0852193832397461\n", "layer:67, std:0.08562126755714417\n", "layer:68, std:0.08368431031703949\n", "layer:69, std:0.08476374298334122\n", "layer:70, std:0.0853630006313324\n", "layer:71, std:0.08237560093402863\n", "layer:72, std:0.08133518695831299\n", "layer:73, std:0.08416958898305893\n", "layer:74, std:0.08226992189884186\n", "layer:75, std:0.08379074186086655\n", "layer:76, std:0.08003697544336319\n", "layer:77, std:0.07888862490653992\n", "layer:78, std:0.07618380337953568\n", "layer:79, std:0.07458437979221344\n", "layer:80, std:0.07207276672124863\n", "layer:81, std:0.07079190015792847\n", "layer:82, std:0.0712786465883255\n", "layer:83, std:0.07165777683258057\n", "layer:84, std:0.06893909722566605\n", "layer:85, std:0.0690247192978859\n", "layer:86, std:0.07030878216028214\n", "layer:87, std:0.07283661514520645\n", "layer:88, std:0.07280214875936508\n", "layer:89, std:0.07130246609449387\n", "layer:90, std:0.07225215435028076\n", "layer:91, std:0.0712454542517662\n", "layer:92, std:0.07088854163885117\n", "layer:93, std:0.0730612576007843\n", "layer:94, std:0.07276967912912369\n", "layer:95, std:0.07259567081928253\n", "layer:96, std:0.07586522400379181\n", "layer:97, std:0.07769150286912918\n", "layer:98, std:0.07842090725898743\n", "layer:99, std:0.08206238597631454\n", "tensor([[-0.1103, -0.0739, 0.1278, ..., -0.0508, 0.1544, -0.0107],\n", " [ 0.0807, 0.1208, 0.0030, ..., -0.0385, -0.1887, -0.0294],\n", " [ 0.0321, -0.0833, -0.1482, ..., -0.1133, 0.0206, 0.0155],\n", " ...,\n", " [ 0.0108, 0.0560, -0.1099, ..., 0.0459, -0.0961, -0.0124],\n", " [ 0.0398, -0.0874, 
-0.2312, ..., 0.0294, -0.0562, -0.0556],\n", " [-0.0234, -0.0297, -0.1155, ..., 0.1143, 0.0083, -0.0675]],\n", " grad_fn=)\n" ] } ], "source": [ "import torch\n", "import random\n", "import numpy as np\n", "import torch.nn as nn\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "class MLP(nn.Module):\n", " def __init__(self, neural_num, layers):\n", " super(MLP, self).__init__()\n", " self.linears = nn.ModuleList([nn.Linear(neural_num, neural_num, bias=False) for i in range(layers)])\n", " self.neural_num = neural_num\n", "\n", " def forward(self, x):\n", " for (i, linear) in enumerate(self.linears):\n", " x = linear(x)\n", " # 多添加了tanh激活函数\n", " x = torch.tanh(x)\n", "\n", " print(\"layer:{}, std:{}\".format(i, x.std()))\n", " if torch.isnan(x.std()):\n", " print(\"output is nan in {} layers\".format(i))\n", " break\n", "\n", " return x\n", "\n", " def initialize(self):\n", " for m in self.modules():\n", " if isinstance(m, nn.Linear):\n", " # 注意此处的std\n", " nn.init.normal_(m.weight.data, std=np.sqrt(1/self.neural_num)) # normal: mean=0, std=1\n", "\n", " # a = np.sqrt(6 / (self.neural_num + self.neural_num))\n", " #\n", " # tanh_gain = nn.init.calculate_gain('tanh')\n", " # a *= tanh_gain\n", " #\n", " # nn.init.uniform_(m.weight.data, -a, a)\n", "\n", " # nn.init.xavier_uniform_(m.weight.data, gain=tanh_gain)\n", "\n", " # nn.init.normal_(m.weight.data, std=np.sqrt(2 / self.neural_num))\n", "# nn.init.kaiming_normal_(m.weight.data)\n", "\n", "# 100层神经网络\n", "layer_nums = 100\n", "# 每一层神经元数为256\n", "neural_nums = 256\n", "batch_size = 16\n", "\n", "net = MLP(neural_nums, layer_nums)\n", "net.initialize()\n", "\n", "inputs = torch.randn((batch_size, neural_nums)) # normal: mean=0, std=1\n", "\n", "output = net(inputs)\n", "print(output)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:使用Xavier初始化tanh激活函数的全连接网络解决梯度消失问题的代码示例?" 
] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "layer:0, std:0.7571136355400085\n", "layer:1, std:0.6924336552619934\n", "layer:2, std:0.6677976846694946\n", "layer:3, std:0.6551960110664368\n", "layer:4, std:0.655646800994873\n", "layer:5, std:0.6536089777946472\n", "layer:6, std:0.6500504612922668\n", "layer:7, std:0.6465446949005127\n", "layer:8, std:0.6456685662269592\n", "layer:9, std:0.6414617896080017\n", "layer:10, std:0.6423627734184265\n", "layer:11, std:0.6509683728218079\n", "layer:12, std:0.6584846377372742\n", "layer:13, std:0.6530249118804932\n", "layer:14, std:0.6528729796409607\n", "layer:15, std:0.6523412466049194\n", "layer:16, std:0.6534921526908875\n", "layer:17, std:0.6540238261222839\n", "layer:18, std:0.6477403044700623\n", "layer:19, std:0.6469652652740479\n", "layer:20, std:0.6441705822944641\n", "layer:21, std:0.6484488248825073\n", "layer:22, std:0.6512865424156189\n", "layer:23, std:0.6525684595108032\n", "layer:24, std:0.6531476378440857\n", "layer:25, std:0.6488809585571289\n", "layer:26, std:0.6533839702606201\n", "layer:27, std:0.6482065320014954\n", "layer:28, std:0.6471589803695679\n", "layer:29, std:0.6553042531013489\n", "layer:30, std:0.6560811400413513\n", "layer:31, std:0.6522760987281799\n", "layer:32, std:0.6499098539352417\n", "layer:33, std:0.6568747758865356\n", "layer:34, std:0.6544532179832458\n", "layer:35, std:0.6535674929618835\n", "layer:36, std:0.6508696675300598\n", "layer:37, std:0.6428772807121277\n", "layer:38, std:0.6495102643966675\n", "layer:39, std:0.6479291319847107\n", "layer:40, std:0.6470604538917542\n", "layer:41, std:0.6513484716415405\n", "layer:42, std:0.6503545045852661\n", "layer:43, std:0.6458993554115295\n", "layer:44, std:0.6517387628555298\n", "layer:45, std:0.6520006060600281\n", "layer:46, std:0.6539937257766724\n", "layer:47, std:0.6537032723426819\n", "layer:48, std:0.6516646146774292\n", "layer:49, 
std:0.6535552740097046\n", "layer:50, std:0.6464877724647522\n", "layer:51, std:0.6491119265556335\n", "layer:52, std:0.6455202102661133\n", "layer:53, std:0.6520237326622009\n", "layer:54, std:0.6531855463981628\n", "layer:55, std:0.6627183556556702\n", "layer:56, std:0.6544181108474731\n", "layer:57, std:0.6501768827438354\n", "layer:58, std:0.6510448455810547\n", "layer:59, std:0.6549468040466309\n", "layer:60, std:0.6529951691627502\n", "layer:61, std:0.6515748500823975\n", "layer:62, std:0.6453633904457092\n", "layer:63, std:0.644793689250946\n", "layer:64, std:0.6489539742469788\n", "layer:65, std:0.6553947925567627\n", "layer:66, std:0.6535270810127258\n", "layer:67, std:0.6528791785240173\n", "layer:68, std:0.6492816209793091\n", "layer:69, std:0.6596571207046509\n", "layer:70, std:0.6536712646484375\n", "layer:71, std:0.6498764157295227\n", "layer:72, std:0.6538681387901306\n", "layer:73, std:0.64595627784729\n", "layer:74, std:0.6543275117874146\n", "layer:75, std:0.6525828838348389\n", "layer:76, std:0.6462088227272034\n", "layer:77, std:0.6534948945045471\n", "layer:78, std:0.6461930871009827\n", "layer:79, std:0.6457878947257996\n", "layer:80, std:0.6481245160102844\n", "layer:81, std:0.6496317386627197\n", "layer:82, std:0.6516988277435303\n", "layer:83, std:0.6485154032707214\n", "layer:84, std:0.6395408511161804\n", "layer:85, std:0.6498249173164368\n", "layer:86, std:0.6510564088821411\n", "layer:87, std:0.6505221724510193\n", "layer:88, std:0.6573457717895508\n", "layer:89, std:0.6529723405838013\n", "layer:90, std:0.6536353230476379\n", "layer:91, std:0.6497699022293091\n", "layer:92, std:0.6459059715270996\n", "layer:93, std:0.6459072232246399\n", "layer:94, std:0.6530925631523132\n", "layer:95, std:0.6515892148017883\n", "layer:96, std:0.6434286832809448\n", "layer:97, std:0.6425578594207764\n", "layer:98, std:0.6407340168952942\n", "layer:99, std:0.6442393660545349\n", "tensor([[ 0.1133, 0.1239, 0.8211, ..., 0.9411, -0.6334, 0.5155],\n", " 
[-0.9585, -0.2371, 0.8548, ..., -0.2339, 0.9326, 0.0114],\n", " [ 0.9487, -0.2279, 0.8735, ..., -0.9593, 0.7922, 0.6263],\n", " ...,\n", " [ 0.7257, 0.0800, -0.4440, ..., -0.9589, 0.2604, 0.5402],\n", " [-0.9572, 0.5179, -0.8041, ..., -0.4298, -0.6087, 0.9679],\n", " [ 0.6105, 0.3994, 0.1072, ..., 0.3904, -0.5274, 0.0776]],\n", " grad_fn=)\n" ] } ], "source": [ "import torch\n", "import random\n", "import numpy as np\n", "import torch.nn as nn\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "class MLP(nn.Module):\n", " def __init__(self, neural_num, layers):\n", " super(MLP, self).__init__()\n", " self.linears = nn.ModuleList([nn.Linear(neural_num, neural_num, bias=False) for i in range(layers)])\n", " self.neural_num = neural_num\n", "\n", " def forward(self, x):\n", " for (i, linear) in enumerate(self.linears):\n", " x = linear(x)\n", " # 多添加了tanh激活函数\n", " x = torch.tanh(x)\n", "\n", " print(\"layer:{}, std:{}\".format(i, x.std()))\n", " if torch.isnan(x.std()):\n", " print(\"output is nan in {} layers\".format(i))\n", " break\n", "\n", " return x\n", "\n", " def initialize(self):\n", " for m in self.modules():\n", " if isinstance(m, nn.Linear):\n", " # 手动计算xavier\n", " a = np.sqrt(6 / (self.neural_num + self.neural_num))\n", " tanh_gain = nn.init.calculate_gain('tanh')\n", " a *= tanh_gain\n", " nn.init.uniform_(m.weight.data, -a, a)\n", " # pytorch提供的xavier初始化方法,效果同上\n", " # tanh_gain = nn.init.calculate_gain('tanh')\n", " # nn.init.xavier_uniform_(m.weight.data, gain=tanh_gain)\n", "\n", " # nn.init.normal_(m.weight.data, std=np.sqrt(2 / self.neural_num))\n", "# nn.init.kaiming_normal_(m.weight.data)\n", "\n", "# 100层神经网络\n", "layer_nums = 100\n", "# 每一层神经元数为256\n", "neural_nums = 256\n", "batch_size = 16\n", "\n", "net = MLP(neural_nums, layer_nums)\n", "net.initialize()\n", "\n", "inputs = torch.randn((batch_size, neural_nums)) # normal: mean=0, std=1\n", "\n", "output = net(inputs)\n", "print(output)" ] }, { 
"cell_type": "markdown", "metadata": {}, "source": [ "Q:使用He初始化(Kaiming)relu激活函数的全连接网络解决梯度消失问题的代码示例?" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "layer:0, std:0.826629638671875\n", "layer:1, std:0.8786815404891968\n", "layer:2, std:0.9134422540664673\n", "layer:3, std:0.8892471194267273\n", "layer:4, std:0.834428071975708\n", "layer:5, std:0.874537467956543\n", "layer:6, std:0.7926971316337585\n", "layer:7, std:0.7806458473205566\n", "layer:8, std:0.8684563636779785\n", "layer:9, std:0.9434137344360352\n", "layer:10, std:0.964215874671936\n", "layer:11, std:0.8896796107292175\n", "layer:12, std:0.8287257552146912\n", "layer:13, std:0.8519769906997681\n", "layer:14, std:0.8354345560073853\n", "layer:15, std:0.802306056022644\n", "layer:16, std:0.8613607287406921\n", "layer:17, std:0.7583686709403992\n", "layer:18, std:0.8120225071907043\n", "layer:19, std:0.791111171245575\n", "layer:20, std:0.7164372801780701\n", "layer:21, std:0.778393030166626\n", "layer:22, std:0.8672043085098267\n", "layer:23, std:0.874812662601471\n", "layer:24, std:0.9020991325378418\n", "layer:25, std:0.8585715889930725\n", "layer:26, std:0.7824353575706482\n", "layer:27, std:0.7968912720680237\n", "layer:28, std:0.8984369039535522\n", "layer:29, std:0.8704465627670288\n", "layer:30, std:0.9860473275184631\n", "layer:31, std:0.9080777168273926\n", "layer:32, std:0.9140636920928955\n", "layer:33, std:1.009956955909729\n", "layer:34, std:0.9909380674362183\n", "layer:35, std:1.0253208875656128\n", "layer:36, std:0.849043607711792\n", "layer:37, std:0.703953742980957\n", "layer:38, std:0.7186155319213867\n", "layer:39, std:0.7250635027885437\n", "layer:40, std:0.7030817270278931\n", "layer:41, std:0.6325559020042419\n", "layer:42, std:0.6623690724372864\n", "layer:43, std:0.6960875988006592\n", "layer:44, std:0.7140733003616333\n", "layer:45, std:0.632905125617981\n",
"layer:46, std:0.6458898186683655\n", "layer:47, std:0.7354375720024109\n", "layer:48, std:0.6710687279701233\n", "layer:49, std:0.6939153671264648\n", "layer:50, std:0.6889258027076721\n", "layer:51, std:0.6331773996353149\n", "layer:52, std:0.6029313206672668\n", "layer:53, std:0.6145528554916382\n", "layer:54, std:0.6636686325073242\n", "layer:55, std:0.7440094947814941\n", "layer:56, std:0.7972175478935242\n", "layer:57, std:0.7606149911880493\n", "layer:58, std:0.696868360042572\n", "layer:59, std:0.7306802272796631\n", "layer:60, std:0.6875627636909485\n", "layer:61, std:0.7171440720558167\n", "layer:62, std:0.7646605372428894\n", "layer:63, std:0.7965086698532104\n", "layer:64, std:0.8833740949630737\n", "layer:65, std:0.8592952489852905\n", "layer:66, std:0.8092936873435974\n", "layer:67, std:0.806481122970581\n", "layer:68, std:0.6792410612106323\n", "layer:69, std:0.6583346128463745\n", "layer:70, std:0.5702278017997742\n", "layer:71, std:0.5084435939788818\n", "layer:72, std:0.4869326055049896\n", "layer:73, std:0.46350404620170593\n", "layer:74, std:0.4796811640262604\n", "layer:75, std:0.47372108697891235\n", "layer:76, std:0.45414549112319946\n", "layer:77, std:0.4971912205219269\n", "layer:78, std:0.492794930934906\n", "layer:79, std:0.4422350823879242\n", "layer:80, std:0.4802998900413513\n", "layer:81, std:0.5579248666763306\n", "layer:82, std:0.5283755660057068\n", "layer:83, std:0.5451980829238892\n", "layer:84, std:0.6203726530075073\n", "layer:85, std:0.6571893095970154\n", "layer:86, std:0.703682005405426\n", "layer:87, std:0.7321067452430725\n", "layer:88, std:0.6924356818199158\n", "layer:89, std:0.6652532815933228\n", "layer:90, std:0.6728308796882629\n", "layer:91, std:0.6606621742248535\n", "layer:92, std:0.6094604730606079\n", "layer:93, std:0.6019102334976196\n", "layer:94, std:0.595421552658081\n", "layer:95, std:0.6624555587768555\n", "layer:96, std:0.6377885341644287\n", "layer:97, std:0.6079285740852356\n", "layer:98, 
std:0.6579315066337585\n", "layer:99, std:0.6668476462364197\n", "tensor([[0.0000, 1.3437, 0.0000, ..., 0.0000, 0.6444, 1.1867],\n", " [0.0000, 0.9757, 0.0000, ..., 0.0000, 0.4645, 0.8594],\n", " [0.0000, 1.0023, 0.0000, ..., 0.0000, 0.5148, 0.9196],\n", " ...,\n", " [0.0000, 1.2873, 0.0000, ..., 0.0000, 0.6454, 1.1411],\n", " [0.0000, 1.3589, 0.0000, ..., 0.0000, 0.6749, 1.2438],\n", " [0.0000, 1.1807, 0.0000, ..., 0.0000, 0.5668, 1.0600]],\n", " grad_fn=)\n" ] } ], "source": [ "import torch\n", "import random\n", "import numpy as np\n", "import torch.nn as nn\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "class MLP(nn.Module):\n", " def __init__(self, neural_num, layers):\n", " super(MLP, self).__init__()\n", " self.linears = nn.ModuleList([nn.Linear(neural_num, neural_num, bias=False) for i in range(layers)])\n", " self.neural_num = neural_num\n", "\n", " def forward(self, x):\n", " for (i, linear) in enumerate(self.linears):\n", " x = linear(x)\n", " # 多添加了relu激活函数\n", " x = torch.relu(x)\n", "\n", " print(\"layer:{}, std:{}\".format(i, x.std()))\n", " if torch.isnan(x.std()):\n", " print(\"output is nan in {} layers\".format(i))\n", " break\n", "\n", " return x\n", "\n", " def initialize(self):\n", " for m in self.modules():\n", " if isinstance(m, nn.Linear):\n", " # 手动计算kaiming初始化\n", " nn.init.normal_(m.weight.data, std=np.sqrt(2 / self.neural_num))\n", " # pytorch官方提供kaiming初始化\n", " # nn.init.kaiming_normal_(m.weight.data)\n", "\n", "# 100层神经网络\n", "layer_nums = 100\n", "# 每一层神经元数为256\n", "neural_nums = 256\n", "batch_size = 16\n", "\n", "net = MLP(neural_nums, layer_nums)\n", "net.initialize()\n", "\n", "inputs = torch.randn((batch_size, neural_nums)) # normal: mean=0, std=1\n", "\n", "output = net(inputs)\n", "print(output)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:如何计算激活函数的方差变化尺度?\n", "- `torch.nn.init.calculate_gain(nonlinearity, param=None)`\n", "- nonlinearity:激活函数名称\n", "- 
param:激活函数的参数,如Leaky ReLU的negative_slop\n", "\n", "Q:calculate_gain代码示例?" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "gain:1.5827221870422363\n", "tanh_gain in PyTorch: 1.6666666666666667\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "\n", "# 数据\n", "x = torch.randn(10000)\n", "out = torch.tanh(x)\n", "\n", "# 得到标准差的尺度变化,即缩放比例\n", "gain = x.std() / out.std()\n", "print('gain:{}'.format(gain))\n", "\n", "# pytorch计算tanh的增益\n", "tanh_gain = nn.init.calculate_gain('tanh')\n", "print('tanh_gain in PyTorch:', tanh_gain)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# 2.损失函数(一)\n", "\n", "Q:如何进行交叉熵计算?\n", "- `torch.nn.CrossEntropyLoss(weight: Optional[torch.Tensor] = None, size_average=None, ignore_index: int = -100, reduce=None, reduction: str = 'mean')`\n", "- nn.LogSoftmax()与nn.NLLLoss()结合\n", "- weight:各类别的loss设置权值\n", "- ignore_index:忽略某个类别\n", "- reduction:计算模式,可为none/sum/mean\n", " - none:逐个元素计算\n", " - sum:所有元素求和,返回标量\n", " - mean:加权平均,返回标量\n", "- 忽略size_average和reduce两个字段,后续会废除\n", "\n", "\n", "Q:人民币分类模型代码演示" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Class:1, train:80, valid:10, test:10\n", "已在 /Volumes/code/GitHub/Learn-AI/pytorch_deepshare/data/rmb_split/test/1 创建划分好的数据\n", "\n", "Class:100, train:80, valid:10, test:10\n", "已在 /Volumes/code/GitHub/Learn-AI/pytorch_deepshare/data/rmb_split/test/100 创建划分好的数据\n", "\n" ] } ], "source": [ "\"\"\"\n", "# @file name : 1_split_dataset.py\n", "# @author : TingsongYu https://github.com/TingsongYu\n", "# @date : 2020-07-24 10:08:00\n", "# @brief : 将数据集划分为训练集,验证集,测试集\n", "\"\"\"\n", "\n", "import os\n", "import random\n", "import shutil\n", "\n", "\n", "def makedir(new_dir):\n", " if not os.path.exists(new_dir):\n", " os.makedirs(new_dir)\n", "\n", "\n", "if __name__ == '__main__':\n", "\n", " 
dataset_dir = os.path.abspath(os.path.join(\"data\", \"RMB_data\"))\n", " split_dir = os.path.abspath(os.path.join(\"data\", \"rmb_split\"))\n", " train_dir = os.path.join(split_dir, \"train\")\n", " valid_dir = os.path.join(split_dir, \"valid\")\n", " test_dir = os.path.join(split_dir, \"test\")\n", "\n", " if not os.path.exists(dataset_dir):\n", " raise Exception(\"\\n{} 不存在,请下载 02-01-数据-RMB_data.rar 放到\\n{} 下,并解压即可\".format(\n", " dataset_dir, os.path.dirname(dataset_dir)))\n", "\n", " train_pct = 0.8\n", " valid_pct = 0.1\n", " test_pct = 0.1\n", "\n", " for root, dirs, files in os.walk(dataset_dir):\n", " for sub_dir in dirs:\n", "\n", " imgs = os.listdir(os.path.join(root, sub_dir))\n", " imgs = list(filter(lambda x: x.endswith('.jpg'), imgs))\n", " random.shuffle(imgs)\n", " img_count = len(imgs)\n", "\n", " train_point = int(img_count * train_pct)\n", " valid_point = int(img_count * (train_pct + valid_pct))\n", "\n", " for i in range(img_count):\n", " if i < train_point:\n", " out_dir = os.path.join(train_dir, sub_dir)\n", " elif i < valid_point:\n", " out_dir = os.path.join(valid_dir, sub_dir)\n", " else:\n", " out_dir = os.path.join(test_dir, sub_dir)\n", "\n", " makedir(out_dir)\n", "\n", " target_path = os.path.join(out_dir, imgs[i])\n", " src_path = os.path.join(dataset_dir, sub_dir, imgs[i])\n", "\n", " shutil.copy(src_path, target_path)\n", "\n", " print('Class:{}, train:{}, valid:{}, test:{}'.format(sub_dir, train_point, valid_point-train_point,\n", " img_count-valid_point))\n", " print(\"已在 {} 创建划分好的数据\\n\".format(out_dir))\n" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Training:Epoch[000/010] Iteration[010/010] Loss: 0.6654 Acc:58.75%\n", "Valid:\t Epoch[000/010] Iteration[002/002] Loss: 0.8840 Acc:58.75%\n", "Training:Epoch[001/010] Iteration[010/010] Loss: 0.4679 Acc:84.38%\n", "Valid:\t Epoch[001/010] Iteration[002/002] Loss: 0.2392 Acc:84.38%\n", 
"Training:Epoch[002/010] Iteration[010/010] Loss: 0.3967 Acc:80.62%\n", "Valid:\t Epoch[002/010] Iteration[002/002] Loss: 0.1648 Acc:80.62%\n", "Training:Epoch[003/010] Iteration[010/010] Loss: 0.1178 Acc:96.88%\n", "Valid:\t Epoch[003/010] Iteration[002/002] Loss: 0.0284 Acc:96.88%\n", "Training:Epoch[004/010] Iteration[010/010] Loss: 0.0138 Acc:100.00%\n", "Valid:\t Epoch[004/010] Iteration[002/002] Loss: 0.1566 Acc:100.00%\n", "Training:Epoch[005/010] Iteration[010/010] Loss: 0.0511 Acc:98.75%\n", "Valid:\t Epoch[005/010] Iteration[002/002] Loss: 0.0001 Acc:98.75%\n", "Training:Epoch[006/010] Iteration[010/010] Loss: 0.0033 Acc:100.00%\n", "Valid:\t Epoch[006/010] Iteration[002/002] Loss: 0.0002 Acc:100.00%\n", "Training:Epoch[007/010] Iteration[010/010] Loss: 0.0440 Acc:98.12%\n", "Valid:\t Epoch[007/010] Iteration[002/002] Loss: 0.0002 Acc:98.12%\n", "Training:Epoch[008/010] Iteration[010/010] Loss: 0.0173 Acc:99.38%\n", "Valid:\t Epoch[008/010] Iteration[002/002] Loss: 0.0004 Acc:99.38%\n", "Training:Epoch[009/010] Iteration[010/010] Loss: 0.0228 Acc:99.38%\n", "Valid:\t Epoch[009/010] Iteration[002/002] Loss: 0.0006 Acc:99.38%\n" ] }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9d3xkdb3///yc6ZNMyqZuku3Lwha2ZBeU3pSiFEUEVgGxYS9Xvd8fqFcRvfdyLdeKIiioiCxIEVAExEuTvp2tbGFLks2mbNok08/n98eZM32SyWZmk0k+z8dj3czMOZ98ksXP67y7kFKiUCgUiqmLNt4bUCgUCsX4ooRAoVAopjhKCBQKhWKKo4RAoVAopjhKCBQKhWKKYx3vDYyW6upqOXv27PHehkKhUBQV69at65JS1mT6rOiEYPbs2axdu3a8t6FQKBRFhRBif7bPlGtIoVAopjhKCBQKhWKKo4RAoVAopjhFFyNQKBSKXAmFQrS0tOD3+8d7K8cMp9NJU1MTNpst53uUECgUiklLS0sLHo+H2bNnI4QY7+0UHCkl3d3dtLS0MGfOnJzvU64hhUIxafH7/VRVVU0JEQAQQlBVVTVqC0gJgUKhmNRMFREwOZqfVwnBsULX87rc1rY+NhzoyeuaCoViaqKEoND4euDHS+CN3+R12R88tZNb/rotr2sqFIr80t3dzfLly1m+fDn19fU0NjbGXgeDwZzW+OhHP8rOnTsLuk8VLC40zgqIhKB1XV6XHQpGCITya2UoFIr8UlVVxcaNGwG4+eabKS0t5Wtf+1rSNVJKpJRoWubn8rvvvrvg+1QWQaERAhpXQmt+22IEwzrhPLubFArFsWH37t0sWbKET3/60zQ3N3Po0CFuuOEGVq1axeLFi7nlllti155++uls3LiRcDhMRUUFN954I8uWLeOUU06ho6MjL/tRFsGxoLEZdv7NcBO5KvOyZDCsE4qoMaMKRa585/GtbGvrz+uaixrK+PYli4/q3m3btnH33Xdz++23A3Drrbcybdo0wuEw55xzDldccQWLFi1Kuqevr4+zzjqLW2+9la985Svcdddd3HjjjWP+OZRFcCxoXGn83bYhb0sGIzqhiLIIFIpiZd68eZx00kmx1/fddx/Nzc00Nzezfft2tm1LjwG6XC4uuugiAFauXMm+ffvyshdlERwLGlYYf7eug3nn5mXJQDhCWFkECkXOHO2Te6EoKSmJfb1r1y5++tOf8vrrr1NRUcE111yTsRbAbrfHvrZYLITD4bzsRVkExwJXBVQvgNb1eVtSxQgUislDf38/Ho+HsrIyDh06xFNPPXVMv7+yCI4VjSth9z9BSiOAPEaCYR1dGQQKxaSgubmZRYsWsWTJEubOnctpp512TL+/kLK4TpNVq1bJohxM8/qd8MTX4MtboGLGmJdb/K0nAdh6y4VjXkuhmKxs376dhQsXjvc2jjmZfm4hxDop5apM1yvX0LGisdn4O0/1BMGITkiZBAqFIg8oIThW1C0Biz0vQqDrklBEElZZQwqFIg8oIThWWB1QvzQvAeNgVAB0aYiCQqFQjAUlBMeSxpVGLYEeGdMygXDcEgipzCGFQjFGlBAcSxpXQmgQOneMaZlgghCoWgKFQjFWlBAcS8wK4zHGCYIRJQQKhSJ/KCE4lkybC87ysQuBcg0pFEXB2WefnVYc9pOf/ITPfvazWe8pLS0FoK2tjSuuuCLruvlMoy+oEAghLhRC7BRC7BZCpHVGEkLMFEI8K4TYIITYLIR4TyH3M+5oWrQTaf6EQFkECsXEZfXq1axZsybpvTVr1rB69eoR721oaODBBx8s1NaSKJgQCCEswG3ARcAiYLUQYlHKZd8EHpBSrgCuBn5ZqP1MGBpXwuFtEBw66iWSLAKVQqpQTFiuuOIK/vrXvxIIBADYt28fbW1tLF++nPPOO4/m5mZOPPFEHn300bR79+3bx5IlSwDw+Xx
cffXVLF26lKuuugqfz5fXfRayxcTJwG4p5V4AIcQa4DIgsaWeBMqiX5cDbQXcz8SgcSXICBzaBLNOOaolAuF41lFYpY8qFLnx9xuh/c38rll/Ilx0a9aPq6qqOPnkk3nyySe57LLLWLNmDVdddRUul4tHHnmEsrIyurq6eOc738mll16add7wr371K9xuN5s3b2bz5s00Nzfn9ccopGuoETiY8Lol+l4iNwPXCCFagCeALxRwPxODhrFXGCe7hpRFoFBMZBLdQ6ZbSErJ17/+dZYuXcq73vUuWltbOXz4cNY1XnjhBa655hoAli5dytKlS/O6x0JaBJmkLfXxdTXwOynlj4QQpwD3CCGWSCmTTjchxA3ADQAzZ84syGaPGZ46KJ8xJiEIJBz+QSUECkVuDPPkXkje97738ZWvfIX169fj8/lobm7md7/7HZ2dnaxbtw6bzcbs2bMztp1OJJu1kA8KaRG0AInd1ZpId/18HHgAQEr5CuAEqlMXklLeIaVcJaVcVVNTU6DtHkPGGDBWwWKFongoLS3l7LPP5mMf+1gsSNzX10dtbS02m41nn32W/fv3D7vGmWeeyb333gvAli1b2Lx5c173WEgheAM4TggxRwhhxwgGP5ZyzQHgPAAhxEIMIegs4J4mBo0roXc/DHYd1e1JQqDSRxWKCc/q1avZtGkTV199NQAf/vCHWbt2LatWreLee+/lhBNOGPb+z3zmM3i9XpYuXcr3v/99Tj755Lzur2CuISllWAjxeeApwALcJaXcKoS4BVgrpXwM+CpwpxDi3zDcRtfLYuuLfTQkFpYtuGDUtydnDU3+X5dCUey8//3vJ/Foq66u5pVXXsl4rdfrBWD27Nls2bIFMEZUpqah5pOCDqaRUj6BEQROfO9bCV9vA47tBIaJwPRlILSjFwJVWaxQKPKIqiweDxylULPwqOMEgVA8fVRVFisUirGihGC8aIoGjI/CE6YsAoUid6aCtzmRo/l5lRCMF40rwdcDR/aO+lZVR6BQ5IbT6aS7u3vKiIGUku7ubpxO56juU8Prx4tYwHg9VM0b1a3JTeemxn/gCsXR0NTUREtLC52dkz8Z0cTpdNLU1DSqe5QQjBc1C8HqMtxDSz84qlsDEWURKBS5YLPZmDNnznhvY8KjXEPjhcUKDcuPKmCsCsoUCkU+UUIwnjSuNJrPRUKjuk3NI1AoFPlECcF40rgSIgE4vGVUtwXCOk6b8U+nLAKFQjFWlBCMJ0c5ujIY1imxG+EdNY9AoVCMFSUE40nFTHBXG5lDoyAY1nE7LICaR6BQKMaOEoIc2NHez4k3P8XO9oH8LizEUXUiDUbiFkFq1tDLe7r402sH8rZFhUIx+VFCkAOPrG9lwB/mtbe787940yro3An+/pxvCYZ1XHbDIgimxAjuf+Mgv/i/XXndokKhmNwoIRgBKSV/39IOwPZDuR/WOdPYDEho25DzLcGwjsOqYdFEmkUQiuiqyEyhUIyKKSME4YjO+gM9o75v26F+DhwZQhOw/VCeXUNwVKMrAxEdh9WCVRNpMYJgWFdFZgqFYlRMGSH46T93cdWvX6G11zeq+57c0o4m4OKlDexsHyCS76dt9zSYNnd0QhCKYLdq2C1aWtZQIKyrGQUKhWJUTBkhuPpkY9bx7c/tGdV9f9/SzslzpnH6/Gp8oQgHjgwlff7Knu6xp3A2rhpV5lAwomO3algtIq2OIBTRVUqpQqEYFVNGCBorXFyxcgb3v3GQ9r7hh0Sb7O4YYHeHl4uWTOeE6R4gOU6w6WAvq+98lSfePDTGza2EgTboTx3pnJlgWMdh0bBatLRRlcGwEgKFQjE6powQAHz27HnoUnL787lZBU9Gg8QXLK5nQZ0HTcCOBCF4bqfR0XB/91DG+3NmlIVlwbBhEdg0keYGCkUkuiT/LiyFQjFpmVJCMGOam8ubG7nv9QN09I9sFfx9SzvNMyuoL3fitFmYU13C9oRaghd3GULQNsq4Qxr
1J4JmzV0IYq4hLS0wbPYhUlaBQqHIlSklBACfO2c+YV3y42d28ciGFr60ZgNX3v4K/f7kxm8HuofY2tbPRUumx95bOL0s5hrq94fYcLAXYNQB6DRsTqhbMjqLwGLECFJTRc3pZariWKFQ5MqUE4JZVSVctryB+14/wL/dv4nndnby+r4j/D3Fz//4ZsNff+GS+th7C6eX0dLjo98f4uXd3UR0SV2ZY+xCAEZhWesGyKGbaCCs47Bp2LRhLIKwsggUCkVuTDkhALjpooV8870LefRzp7H+P97NrCo3j2+KC4GUkkc3trJyViUzprlj7y+MBox3tg/w4q5OSuwWLloynbZe39hH4TWuhOAAdA9fFRzRJRFdYrdYMmYNmRaBak+tUChyZUoKQY3HwSfOmMuyGRVYNMElSxt4eU8XHQNG3GD7oQHeOuzlfcsbku5bOL0s+nk/L+zq5JR51cyc5sYf0jkyGBzbpsyAccvaYS8zn/jNGEGqa8iMDYy1lkBKiS8YGdMaCoWiOJiSQpDKpcsb0CU8sdmwCh7d1IpVE7x3abIQ1Jc5KXfZeHJLOweP+DhzQTWNlS4A2npzS0nNStVxYPeMGCdIFAJbhhYT5udjrS5+cVcXK7779NgFTqFQTHiUEAAL6jycUO/h8c2H0HXJ4xvbOHNBDdNK7EnXCSFYON3Dy3uM5nNnHldDY4UhBK29Y0wh1TRoXDGiEAQixlN6toKyfGUN7T8yhD+kx6wkhUIxeVFCEOWSZQ2s29/DIxtaaevzc1mKW8jkhHrDPTRjmotZVe4EIcjDgdm4yphWFsq+lnnQOywaNouWFAvQdRnLFhqra8gfdQsNKfeQQjHpUUIQ5ZKoG+jmx7bitlt496K6jNctisYJzjiuBiEEFW4bLptl7LUEYMQJ9DC0v5n1kqQYgZZsEQQTrICxWgSmAAwFlBAoFJMdJQRRZla5WT6jgoFAmPMX1eGODn5JZcXMCoQgJhRCCBoqnLT25EkIAFqzB4wDpkVgBouzHP5jtQh8IUMABoPhMa2jUCgmPkoIErhkmWEVXLaiMes1x9V5ePWm8zjn+NrYe42Vbtr68iAEZdPB0zBsnCApWGwRSYd/MJw/i8AXFYAhJQQKxaQn82PvFOWad86kscLJ2Qtqhr2ursyZ9LqxwsnW1r78bKKxeXghiCQKgZZUQZzoGkoNIo8W0zU0qFxDCsWkR1kECTisFi5cMh0hxKjua6xw0T0YxB/Kw6HZtAqO7IWhIxk/jlkEFg2rpiUd+KFwwtdjtQhCZrBYWQQKxWRHCUEeaIhlDuUzTpB5PsGwrqFIXIjG7hpSWUMKxVRBCUEeMFNI85I5NH05ILK6hwJJlcXJoyoD4fwHi5UQKBSTHyUEeaAhn0LgLIOa47MKgRkHcFgN11C2TKHUgTWjJR4jUK4hhWKyo4QgD9SXO9EE+UkhBcM91LoOMjSyC0Sf1B1WC7aUyuLErKHgMN1HX3irk0/fs27YRnnKNaRQTB2UEOQBm0WjrsyZn+piMIRgqAt6D6R9lJg1lDqqMtE6GG4ewSt7u3lya3tSllEqsToCZREoFJOeggqBEOJCIcROIcRuIcSNWa65UgixTQixVQjxp0Lup5A0VrjG3m8otlj2wrLErCFzVKX5ZJ9rHcFQ9HD3h4a5RlkECsWUoWBCIISwALcBFwGLgNVCiEUp1xwH3AScJqVcDHy5UPspNA0VrrF3IDWpWwwWR8bModQ21BCfT5xrsNg83APDpLv6VWWxQjFlKKRFcDKwW0q5V0oZBNYAl6Vc80ngNillD4CUsqOA+ykojZUuDvX50PMxItJig+nLMgaMgylZQ0BCo7kcLYLoIZ/NIpBSxuoH1EwChWLyU0ghaAQOJrxuib6XyAJggRDiJSHEq0KICzMtJIS4QQixVgixtrOzs0DbHRsNFS5CEUmnN5CfBRtXQttGiCQ/kQcjOkKAVRPYNOOfzzz0E11Dw80jiLmGwpkP+UBYx9QzZRE
oFJOfQgpBpvLc1MdlK3AccDawGviNEKIi7SYp75BSrpJSrqqpGb79w3jRWGG0nchLURkYFcZhH3RuT3rbHFwvhIhbBJF0iyCYg2soWyV04vuq+6hCMfkppBC0ADMSXjcBbRmueVRKGZJSvg3sxBCGoqOxwphtvOvwQJ4WbDb+ThldGQjr2K3GP5sZIzBnEiT3Gho5EJzNNWR+Xua0KotAoZgCFFII3gCOE0LMEULYgauBx1Ku+QtwDoAQohrDVbS3gHsqGHNrSjiutpTv/W07O9r7x75g5RxwVabFCQJhHYfVAoBNS7YIcs4aCppZQ5mf9s3U0WqPA39IjwWjFQrF5KRgQiClDAOfB54CtgMPSCm3CiFuEUJcGr3sKaBbCLENeBb4dylld6H2VEhsFo3ffexk3HYLH7nr9bG7iISIFpYlZw4FwzqOFIsgJgQJNQbDZQ35RnANmZ9XlzgA1XhOoZjsFLSOQEr5hJRygZRynpTyP6PvfUtK+Vj0ayml/IqUcpGU8kQp5ZpC7qfQNFa4+P3HTmYoGOG6375G79AYB783rjRiBAFv7K1gJO4askVjBMGUYHGJ3TKsRTBoCkGW6mPTNVRVasxsVplDCsXkRlUW55kT6sv49bUr2dM5yEPrW8e2WOMqkDoc2hR7KxiOYLeYQhC1CPT4wHqLJnBYLcPOIxjRIjBdQ6WGRTCohEChmNQoISgAp8ytwmnTaB/r1DIzYJxQYRxMDBZniBHYLRo2q8hqEYQiesyCyFZQZk4nMy0C1WZCoZjcKCEoAEIIaj1OOgbGWFNQUg0Vs5ICxsmuofQ6ApvFqC8IZQnwJraMyJY1lGoRqDYTCsXkRglBgaj1OOjoz0NxWUrAOBDSY66h1MriYERit1qwWTRCWfz/if7+QJaCMvPgrzYtAhUsVigmNUoICkSNx0HHQB56DzWuhL6DMHAYMCwCh810DaVbBHaLiA6syRYIjh/qWS2CWLA4ahGoojKFYlKjhKBA1HocdI7VNQRGhTFAm2EVmHEAiGcNJVYWm0Pts1UWJ7uGRkgfLVXpowrFVCAnIRBCzBJCvCv6tUsI4Snstoqf2jIn/f7w2Afa1y8FYYlVGAczVBabT//mZ8bAmuFTQyF7r6GhUASbRVDusqXdo1AoJh8jCoEQ4pPAg8Cvo281YVQEK4ahxmM8TY/ZKrC7oW5RLGAcyJA1FEooKLNZDIsgW9ZQrq4hp82C225UMKsYgUIxucnFIvgccBrQDyCl3AXUFnJTkwFTCPIWJ2hbD7qR+ulIyRpKdQ1ZLdkri3N1DbntFhxWDYsmVIxAoZjk5CIEgeg8AQCEEFbSu4gqUqg1hSAvmUOrwN8HR/YSCEUyZA1F6wLChkVgt2SvIzCFwOOwZm86F4rgslkQQuC2WbJaBP5QhO8/uUPVGSgURU4uQvC8EOLrgEsI8W7gz8Djhd1W8VPrMdpS52U+QWx05brkOoJY1lDcInBYNayalrWy2CwWqyyxZ00f9QUjuOxWANwOS1aL4PW3j/DL5/bw/FsTc0aEQqHIjVyE4EagE3gT+BTwBPDNQm5qMjCtxI5FE/mxCGqOB1sJtK6NNp0zfPfxeQSJ6aMaNmv2GIHZLqKyxD5Mi4lwLD5QYs/eirp70PjZ9nUPHuUPplAoJgLWkS6QUurAndE/ihyxaIKqEnt+YgSaBRpWIFvWocuzE7KGosFiPd5iwmYOtc9aRxBBCKhw2TgymLkpnhEjiFsE2ZrOdXuN+/d3DR39z6ZQKMadEYVACPE2GWICUsq5BdnRJKK2zDH2NhMmjc3w2u3YCaW5hkyLILGOIBTOEiwOhHHZLLhslqwWwVAwEismcw9jEXRFhUBZBApFcZOLa2gVcFL0zxnAz4A/FnJTk4VajzNr+qiuSz5y1+s8uK4lt8WaViEiQU4QB9KDxZFki2DYyuKQkRHktGlZ6wh80WAxGC2ts9URdHmVa0ihmAyMKARSyu6EP61Syp8A5x6DvRU9tZ7
sFsHzb3Xy/FudvLgrx0BrNGC8TNuT1nQuNo8gIuOVxcP0GnLbrThtlmHrCMwYgdtuzZoV1B0VgsP9AVV9rFAUMbm4hpoTXmoYFoKqLM6BWo+Dbm+AiC6xRIu/TH7/yj4A2vtyjCGUNRJx17J8YDeRLHUEwXAEh1lZnKX76GAgHLUIsruGjKwhUwiyWwTdg0GEACnhwJEhTqgvy+1nUSgUE4oRhQD4UcLXYWAfcGVBdjPJqPE40KXx5Fxb5oy9v69rkOd2dqIJaO/PUQiEwFe7nOXeN9kSFQKLJhAiocVEJNqGepjKYl/IOOQdNo1ABotAShmrIwAocQxnEQQ5vs7DjvYB9nUNKiFQKIqUXLKGzjkWG5mM1ERrCToGkoXgnlf3Y9UElyxr4G9vHkJKiRAi2zIxhqqXMW/f07ytx0dX2jQtoY7AcA1Zou9lWncoGKHEbsVptRCM6GnWSigiiegywTVkic0nSERKSZc3wJkLagwh6FaZQwpFsZJVCIQQXxnuRinl/+Z/O5OL2rL0fkODgTAPrD3Ie06czomN5TyyoZXeoRCVJfYR1+uvXk4tUNW/FTgeMALG4eiBHtEldouFqMeIsC5jHUoTv/+0EjvO6BN/IBxPFYV451FngkUQisikZncA3kCYQFhnTrWbqhI7+1XAWKEoWoYLFntG+KMYgdoM/Yb+srGVAX+Yj5w6i+nlhpVwKMc4QV/lEgAqe7bE3rNqRjzAdAXZrCLelTRDdbEvIWsI0hvPmU//sTqCqGWQGgw2awiqShzMri5hn6olUCiKlqwWgZTyO8dyI5MRs5+/WV0speSeV/azuKGM5pmVbDzYC0B7v49FDSP7131aKXv06Uw7Eh9mb8YDAtEsIbtFi7mDghEdF5akNYYSsoYgvfGceeAnuobAqEiucMevM6uKq0rtzKpy88qe7hH3r1AoJia5ZA05gY8Di4GYo1tK+bEC7mtS4LRZKHfZYimkuzu87Ggf4LvvW4IQgvpRWgTBSISNch6Xdm0yUnVEdBpZ1HUDJLlvMs0kGIplDZkWQaoQJLuGTMtgKCVgbBaTVZc6mF1VwsPrW/GHIrH7FApF8ZBLQdk9QD1wAfA8xjyCgUJuajKROKns6W3GuMnzF9UBUFPqQBNwOEchCIR0NunzsPk6ob8VMMZVhnQ95hqyR+cRAGmtqM2MILfdgtNqWgTJYuGPuYbMGEHcIkikO0EIZlUZpsKBI8o9pFAUI7kIwXwp5X8Ag1LK3wPvBU4s7LYmD0abCeOgf3rbYZbNqKAumkFktWjUepyjsAh0NurzjRfRQTW2DBZBfGBN8iEfCOtISbJrKJzZIkgsKDPeT7UIDHGbVmJnTnUJAG93qYCxQlGM5CIEoejfvUKIJUA5MLtgO5pk1HqcdAwEONzvZ9PB3pg1YFJX7sy5liAQ1tkhZyI1e0wIrBaNcIJFYLNoMfdQqhCY9QDuaB0BpLuGzGBxLGso5hpKtQgClDmt2K0as6YZQqAyhxSK4iQXIbhDCFEJ/AfwGLAN+J+C7moSURNtM/GPFLeQyfQyZ87VxcGwThAb4drF0BIVAk0Qish4sDg6jwBIqy5OfNqPpY+mZg2lWgSOzOMquwaDsWB4udtGpdumagkUiiIll8riu6WUEYz4gOo4OkpqPQ6CYZ2H1rcwu8rN/NrSpM/ry528tLsrp7VM94/e0AxvrgE9gs2iEY7osX5DdouGQE+63iQuBNaEGEFm15DLnmIRpMUIAjEhAJhdXaIsAoWiSMnFInhbCHGHEOI8kUv5qyIJc3bxhgO9nL+4Pq3St77cyUAgzIA/lOn2JMzDnsaVEBqEzp3RTqOSUIJFEOtBlGYRxF1DsayhcGbXkNtmCIApCKltJrq9QapK40Vws6tULYFCUazkIgTHA89gDLHfJ4T4hRDi9MJua/JgjqwEeHeKWwiIFZUdziFOYD7hW2acZLzRui7aYiLBIrAmZg1ld/vE6whSrzEOfFdKHUGqRdDlDSQ
JwawqN219vqyN7BQKxcQllzbUPinlA1LKy4HlQBmGm0iRA6ZFUFVip3lmZdrn9dEMova+kQfYBMIRNAHW6vngLDeEwGpkDSUGi2OTy1KDxYmuoSwFZb5QBIsmYq0pzOBzohCEIzo9QyGqSuKuoTnVJUgJLT3KKlAoio1cLAKEEGcJIX4JrMcoKlPdR3OkLtpv6LyFtWmtqAGml7sAONTni7331Qc28ZsX96ZdG+v3o2nQ0Ayta6N1BAnpo8PUEQwlPO1nazExFIzgtlmSXFjGcJq4a+jIkFlDELcIZkxTtQQKRbEyohBER1V+GXgRWCKlvFJK+VDBdzZJ8Dht/Nf7T+QL5x6X8XOzMZ2ZOdQzGOThDS0ZA8jmcHrAiBMc3kaJCBAKJ7SYsMaf5lMri03XUInDkjVY7AtGcNqTq4ON4TTx6xKLyUzKXTYABvxqQI1CUWzkkjW0TErZX/CdTGI+9I6ZWT9z2ixMK7HHagle2tOFlNAzlB48DkZ0HGYLh8aVICPMCe1ltz479vRvt1gIWcy21FlcQzYrmiawW9LHVZpN6RIpcSRbBLGGcwlCkC2WoFAoJj65xAiUCBSY+oRaghffMiyB3qj7JZFAqkUAzA3tSKsstsViBMmuodRAcKbhNEPB+FAaE5fdmtRiwqwqTgwWm1lGSggUiuIjpxiBorDUlxttJqSUvBCdYdzry2ARhHUcZlM5Tx2Uz2BOYEdSryFzQhmkWwRDwQg2i4hVHjttFgIpFoE/FB9TaVJityQ1nTOFoDohWGze41OzixWKoqOgQiCEuFAIsVMIsVsIceMw110hhJBCiFWF3M9EpT7aZmJPp5dDfX5qPQ76fCEiKXUAqcNhaGxmlm97eq+hLPMIUp/2nTYtc7A4U4wg4Um/ezCIzSIoc8U9i2aPI2URKBTFRy7B4i8JIcqEwW+FEOuFEOfncJ8FuA24CFgErBZCLMpwnQf4IvDa6Lc/OZhe5uTIYJB/bOsA4OKlDUgJ/SlWQSBNCFZSFTpESbg3Vkdgs8RdQ8E0iyCcNI3MaU0fYJ/JNVTisCQ96Xd7A1SVONKK44YbdK9QKCYuuVgEH4vGCc4HaoCPArfmcN/JwG4p5V4pZRBYA06IhXgAACAASURBVFyW4brvAt8HcpziPvmoixaVPbjuIHOrSzixyRhS05MSJ0jKGoJYnOAEfVdy+qjZayiDa8jsHQSGayhVCAzXUHIOQZpFkFJVnHhdapdShUIx8clFCMzHvvdg9B3alPDecDQCBxNet0Tfiy8sxApghpTyr8NuQIgbhBBrhRBrOzs7c/jWxYVZXbync5Azjqumwm0csqmZQ0Op/vvpy9HRWKTvIhjRsVkEmiawWbPVESS7fTK7hsK4Uy2CDDGCxIwhE2URKBTFSS5CsE4I8TSGEDwVdeWkj75KJ5NYxE4mIYQG/Bj46kgLSSnvkFKuklKuqqmpyeFbFxemEACccVwNlVEhSM0c6hpIbvSGo5Qu1xwWy92EwnosSBybR6CP4BqyWdLTR4PpwWK33cJQKIIejVl0eYNUl6RbBC67JVaroFAoiodchODjwI3ASVLKIcCG4R4aiRZgRsLrJqAt4bUHWAI8J4TYB7wTeGwqBoxjg2o0wTvnVVHpNoqzEi0CKSVd3kBSNS9Au2cxJ4rdBMORWPwgljUUTk0fTbYIHFZLxuH1aULgsCKl0aBOSkn3YCCLa0hZBApFMZKLEJwC7JRS9gohrgG+CfTlcN8bwHFCiDlCCDtwNcY8AwCklH1Symop5Wwp5WzgVeBSKeXaUf8URY7HaaPUYaV5ViWlDmvMNZRoEXgDYQJhPdkiADo8i5kmvJQMtsQEwKIJNAFhPb2gLNU1FEiIEYQiOqGIzOgaAhgMRBgKRvCH0vcBRr3BkGo6p1AUHbkIwa+AISHEMuD/AfuBP4x0k5QyDHweeArYDjwgpdwqhLhFCHHpGPY8Kfn3C47ny+cZbSg8DiuagN4Ei6ArQ1sHgK7yJQD
UebcmBZJtFi0ta8iwCOKuIUdK1pDZgjrdNWSN7iGQsarYJDWWoFAoioNcWkyEpZRSCHEZ8FMp5W+FEB/JZXEp5RPAEynvfSvLtWfnsuZk5SOnzo59rWmCCrc9KWsoVsTlST6A+zzH4ZN2Gge34rDGvWrGwJpk19BgMJweLE4YXuMLZhYCsx/Se372IjOjzeUyuYZcyjWkUBQluQjBgBDiJuBa4IxofYCtsNtSVLhtyRbBQFQIUg5gq83Om3IOM307sLkTLQKRsbLYZc+ePhoTghTX0Onzq3noM6fy0u4uXtrdRSCkc0K9J23PbrslZlUoFIriIRchuAr4EEY9QbsQYibwg8JuS1GZxSKoSXHJ2CyCTfo8rgs9g8sSP/itFi0pfTQc0QmG9djoSTDTR40AsBAiaaZxIkIIVs6qZOWsSr54XuYuqsZ9qo5AoShGcmk61w7cC5QLIS4G/FLKEWMEirFR6bYlZQ11eoMIAdNS0jatmsYmfR4OgsyX+2Pv2y1akkVgBnGTXENWC7qM1xv4QmZTulyeD9Jx2YwsJD2lNYZCoZjY5NJi4krgdeCDGANpXhNCXFHojU11Ktz2pKyhLm+AaW57rI+QidUi2CjnA0aFceL7iZXFmfz/sSll0VoCX9C4PtU1lCumyCj3kEJRXOTy6PcNjBqCDgAhRA3GDOMHC7mxqY5hESQIQWoxWRSbRdAiqzlCGQvCbyW8n+waMt0+qa4hMNpKlDltScPtjwa3w1h7MBimxHF0VoVCoTj25JI+qpkiEKU7x/sUY6DCbccf0mPB3C5vgGpPeqaOVdMAI04wL7gz4f3kYPFgIHkWARAbcmPOJDCf5J1HaxHYzFbUyiJQKIqJXA70J4UQTwkhrhdCXA/8jZSUUEX+qYhWF5uZQ13eYFaLAGBDZB7TQ/vBb8wRsluTYwS+TDGClAH2vizB4lxRU8oUiuIkl2DxvwN3AEuBZcAdUsr/r9Abm+pUxhrPGe4ho71EuhBYo51GN8l5aEg4tDH6viCsp7uGkttQJw+w7x4MJn3v0eJSQqBQFCU5OXKjw+rVwPpjSEWs31CQoWCYoWAksxBELYKN+jzjjdZ1MOdMo7I4oVjMrPjNaBFEg8UtPT6qSuxpBWW5YoqMcg0pFMVFViEQQgyQ0C008SNASinLCrYrRUIH0hBdA2Z7ifQndbO/UB+ldNmbqG5dF3s/MXsnc7A42TXU0jNEY6XrqPccdw2pWgKFopjIKgRSyvTSUcUxI9E11JmlvQTEW04DtJUsorp1PWDEDvr96XUErpQWExB3DbX2+jJWDOeKihEoFMWJyv6ZoCQGi7NVFQNJdQWHPYuhvxX6D6VVFg/rGopWF7f2+GisGItFYDxXKCFQKIoLJQQTFKfNgtOm0TMYjDecGyZrCKCzfLHxReu69MriDH2EnNa4EHR5gwTCOk2V7qPes0u5hhSKokQJwQTG6DcUjxFk6vhpZg0B9JQtBM0KrevSKouHgmFcNgtagisp5hoK67T0DAGM0SJQdQQKRTGihGACU+G20+czLIIKty0WGE4k0SKw2F1QtwRa16VVFg/4w5S5kkNC8YKyCK29PgCaph29ENgsGjaLUMNpFIoiQwnBBMZsPJethgBIEgebRYPGldC2Absmk1xD/f4QZc7k7uGJLSZaegwhGItFANEOpGo4jUJRVCghmMCYragzzSo2sSZYBHZrVAgC/dSHW5KFwBfG40y2COwWDSGMrKHWHh/lLhse59hGTai5xQpF8aGEYAJjDqfJ1l4Cki0Ch2kRADN925MmlPX7Q5S5kg95IQTO6LjKlp6hMVsDEJ1SplxDCkVRoYRgAlMZbUXdmaXzKCTXEdisAqoXgN3DjKHtSTOL+33priEwx1UaMYKmMRSTmbjtFhUsViiKDCUEE5gKtw1dgjcQpiZDMRkk1xHYLRbQNGhcQePgtqReQ5mCxWCOq9Rp6fGNqarYxG1TU8oUimJDCcEEpiKh+Vu2GEFi1lD
s68aV1AztwqoH0HWJlDJjsBgMIWjv8zMUjIyphsDE7VAWgUJRbCghmMBUuuMHd3bXUIJFEO0mSuNKLDLMIrGfkK7jD+mEIjJjINhh1djT6TVuy0OMwG23MKiEQKEoKpQQTGCSLYJsweKUrCGIBYyXaXsIRwxrAMjqGjrU5wfIS4zAZbMqi0ChKDKUEExgkiyCLDECIQSWaMDYbsYLyhoYdNSyTNtDKKLT74sKQZZgsUm+gsUqRqBQFBdKCCYwiQNiqkqyD4sxM4diFgHQXXEiy8QeQhFJv984mFPTRyHeeK7UYaU8w+ejRdURKBTFhxKCCUyZy4YQ4HFah50jbNYSJNYU9FQsYa7WTniwO+4acmZwDUUbzzVWuBBCpH0+Wlx2C4GwTkTPNMpi8jIUDPO//3graRiQQlEsKCGYwFg0QZnTlrH9dCJmdXGiRdA/bamxRtuGmGsoU7DYdA3lwy0E8cE3U8099Mqebn72z11sONAz3ltRKEaNEoIJTqXbljVQbGJmDtkTLIKBqqXoUmBpX5/gGsocLAbyUkMA8VbUmQLGwbDOuv09sYlokwlvtL+Sd4r2WZJSxhoXKooPJQQTnEuWNXDhkvphr7FlsAg0p4fdsgHH4Y0jBIuNgztfFkGmKWXr9vdw08NvcvJ/PcMHfvUyf157MC/fayIx4J/aQvDP7R2c+f1n6ej3j/dWFEdBTsPrFePHV88/fsRrYq6hBIvAqmms0xcw/+DzNAd/TJX1nIxxBkfUNdRYMfZiMkgXgs6BAB/41cu47RbOX1THE2+2xzqdTiZMATAFYarR2usjoksO9weoLXOO93YUo0RZBJOAWLA4wSKwWTV+GL6SnrmX8I5D9/JP65fg5V9AOJB0r8OaX4vAFY0R+ELGgXjgyCAAt32omZ9cvYLqUntsBvNkYiAakJ+qFkFMCAOhcd6J4mhQQjAJsGWIEdg0QTfl7D71h/zPzDvYZZkPT38DfrEKNv8ZdCO7pdRRWNdQW6/hKmiIVi1Xexx0eYN5+V4TCW/UEhjMgxD8Y9thOgeKSyzNzDTvFLWIih0lBJMA0zWU1Hcoah2Edcl2ZvO9af8F1z4CznJ4+BNw59mw9zkub27itg81UzVCQDpXTCEYDBhCcKjPcANNrzDcBdWlDroyHHLb2vrp8xXv0+RAnlxD/lCEG+5Zy59eO5CPbR0zTAGYqq6xYkcJwSTAatGiQ2biQmAWmQWjlcVlTivMOxdueAHefwcM9cAfLqP6Lx/ivbXdeduLO8U11Nbrx+OwxgLVmVxDEV3ygV+9zG9e3Ju3fRxrvHkKFvf7Q0gJvb7ispqmerC82FFCMAmwaSIpYwjicQOz11CsqljTYNlV8Pk34PzvQctauP10eOQz0Ncy5r2kuoYO9fli1gBAjcfBkcEgekLBWbc3gC9hXGYxEksfHeMTsZnh1e8rrgM1HiwvXqtuKlNQIRBCXCiE2CmE2C2EuDHD518RQmwTQmwWQvxTCDGrkPuZrFgtIsktBHEhCEV0YxZBauqozQmnfgG+tNH4e8tD8LNm+Me3wNd71HtJrSNo6/UzvTwef6gudRDRJT1D8Sfe9mjKYbH5xRMxD8LBMRbS9UUFoL/IDlRTAAaURVCUFEwIhBAW4DbgImARsFoIsSjlsg3AKinlUuBB4PuF2s9kxmbRMlgEhjCEEl1DmXBVwvnfhS+sgyWXw0s/g58tz5hhlAtuW7pF0JBgEZjFcYkB4/a+SSAEefKRmwLQX2TxkphrSMUIipJCWgQnA7ullHullEFgDXBZ4gVSymellEPRl68CTQXcz6TFOoxraDAQIRDWMzacS6JiBrz/dvjUC9CwImOGUU57iYrSUDBCIByhyxtMswgAuhLiBIejFkHHQPEWIw3kqbI45hoqsgN1QAWLi5pCCkEjkFhC2hJ9LxsfB/6e6QMhxA1CiLVCiLWdnZ153OLkwGrRkhrOQVwIjgwaB25WiyCV6UuN7KIMGUa5YraiNp/0p5c
nxgiMLqqJQmDOQ+gZChVt0zZvnp6ITQEoNotgqrfYKHYKKQSZWllmbEkphLgGWAX8INPnUso7pJSrpJSrampq8rjFycEFi+t53/JkjTVTSk0XzIgWQSpmhtHld8YyjPjjB6B9y4i3um1GK2qz90xDRbpFkOgGak9oS9BVhMVm4YiOL9o/KX8WQfEIgZQyb8FyxfhQyBYTLcCMhNdNQFvqRUKIdwHfAM6SUhbfKTABuGJlukctbhFEhSBDn6ER0TRYeiUsvBTeuBNe+KGRYbRsNZzzdcOdlAGX3ZhbfCilmAyg3GXDZhFJMYLDCULQORBIur4YMGsmPA4r3mAYXZdo2tG19DaFwBsY2zrHEl8oEms7XkwCpohTSIvgDeA4IcQcIYQduBp4LPECIcQK4NfApVLKjgLuZcphBotNIfDk6hrKuFiGDKOfr8yaYeS2WxkKhuPFZAmuISEEVSWOpCf/9j5/bF5yRxEGjM22CvXlTqSEoTF0VzUPUimLJwMnMS6gXEPFScGEQEoZBj4PPAVsBx6QUm4VQtwihLg0etkPgFLgz0KIjUKIx7IspxglpkXQPXiUrqFM5Jhh5IpOKWvr8zOtxJ7W7K7aY09yDR3uD3BiYzlQnJlD5uFXHxW8sbhHEusHiiVOYApBhdumgsVFSkHrCKSUT0gpF0gp50kp/zP63reklI9Fv36XlLJOSrk8+ufS4VdU5IpZWdztNYPFeRACEzPD6NMvQkNzQobRA6DrlESF4FCvL8kaMKkpjVsE3kAYbyDMksYyoEiFIHr41Ue7bnrH0Hgt0bVSLG4Ws4ZgerkLbyCMlFNrOt1kQFUWT1KEMIrMYjGCDENpxkz9iXDtwwkZRp+EO85iRXgTQ8FwWjGZSXWCEJiZRU2VbqaV2IsyhdR04Zii5w2MwTXkC+GIpgIXS3WxaRE1lDuJ6BJ/qDgzv6YySggmMVZNI6xLrJrANczM4zGTmGHk6+WLrV/jP73fJtLXklRMZlLtcdDtNdpMmIHiujInNaWO4rYIoqI3FtdQny8U6wRbPBZBVAij/9aqzUTxoYRgEmMGjD1Oa14G0w+LmWH0+Td4svELLNF38hv9ZuY7+tIurS51ENYlfb5QzCKoL3dS43EUZbA4HiNwRF+PxTUUpqnSGBJULDECU/hM669YgtyKOEoIJjFmwDgvgeKcv6mTDU0f5prgTUwT/Vz+5qfSmtlVl8aLyswagvoyJ7We4rUIjhMtNK//Bg10HXXAVEpJf5JFUBwHqmm5NMQsguLYtyKOEoJJTEwI8hkozgG3zcpGOZ/rgjfhDPbC7y5OEoMas6jMG+Bwv58ypxWX3UJNVAiKLdhY0rWJB+y3UPHWn7nb/n2C3iNHtY4vFCGsSxorXQhRRBaBaRGVjd01phgflBBMYszq4oIEioehJDr1bKOcz5HL18BQN/zuvTExqPbEG88d6vPH0i5rPI7o/IQiOkjefoEPbPksXtyEL/4Fc8Qhztz4VQiPfp6A+XNXuOyUOqxFFSNw2y1UuI0HjrG4xhTjgxKCSYx9nCwCsxW1EFC54FQjq2joSEwMYo3nBgyLoK4sLgQAnd4iyRza+Xf44xX02Gr5jP2/sK66lm/qn2JG7xvw+BeNqrBRYB78ZS5jkE+xCKLXH6bUYaXUYTxwFItLSxFHCcEkxpoQLD6WmMNpaj0Owz3VtAqu/UtMDCqCh7Fogk5vgPY+fyztstZj/N3RXwRxgjcfhPuvgbpF/Kjxx/hdtQD8034uz9R/AjbdB8/dOqolzVGdZU4bZS5b8VgEgRAeZ3wKnXINjY6WniFO+s9n2N3hHbc9KCGYxIxXjMBlM4QnqYagaWVMDLQ/XMyikn4O9/np8gZihVhxi2CCC8Ebv4WHPgEz3gnXPUZ7qCQmtqVOK4+XfxhWXAPP3wob/pjzsmZMoMxlo8xpLZoYwYA/TKnTFnMJqmDx6Nh+aIDOgQBbWtMz7I4VSggmMdbxyBoibhGk1RAkiMEdkW/
T1bYHXUJdebIQTGiL4F8/hr99BY47H655EJxleAPhmFuk1GE1Csou/gnMPQce/xLseTanpU0LoNxlWgTFcaB6A2HKnFasFg2XzaJiBKNkIszjUEIwibGbweJj7BoynwwbMlQVm2JQJgf4Xu+NNNAVswjKnFYcVm1iWgRSwjPfgWduhiUfgKvvBVs8SyZmETisRhaNxQZX/gFqToD7r82pfbcZEyhzmjGC4jhQB/xxIfQ4rarx3Cgxa2fG8wFICcEkxqqNj0UQcw1layfdtJI7Z/+ICrzcZ/8eTVo3YLTFqPE46OifYMFiXYe/fRX+9b+w8qNGBbUl/jtNtwiiB6GzDD70ADg8cO8HoT+tC3sS5sHvcdoocxVP1lCSEDqtRWPJTBQ6o5bA4XGsoVFCMImxRXvWeI5xjKBpmosFdaWcPHta1mt8tcu5NngjlcLL/Ceugl5jmF2txzGxLIJICB75FKz9LZz2Jbj4x6Alt+swsmaM33Fp6hNxeSN8+AEIDMC9V4K/P+u36veHcNks2K0aZU5bbCbBRGfAH4r9/B6nTQWLR8nhftMiUK4hRQGwaePjGipz2nj6387ixKbyrNfUlDrYJOfz0chNaP5eI7W092DUIpggQhDywwPXwZsPwHnfgnd9x8iJTUDXJd5gmNJE11DqQVh/Ilz5e+jYBn++3hCXDPT5QrGajzKXrShmEkR0yWAwErMIPA6r6jU0SszYwHhW1SshmMSMS4uJHDFrCdpLlyCu+4sx4OZ37+U4R+/EsAgCA/CnD8LOJ+A9P4QzvpomAmAMoZHSOAAhg0VgMv88uOSnsOef8Nd/y1hj0O8LxzK8TPGe6HEC82dNi5EociZmESghUBSCeGXxxBWC+nInNK6E6x4BXy+f3PtF3EOHCISPvpXzmBk6An94H+x7Cd5/B5z8yayXmk+/MYvAbiUQ1gmGM7Ribr4Wzvx32HAPvPjDtI/7/SHKo/9W5r/ZRI8TpAqBx5nBIlJkJaJLur0BnDYNbyDMUHB8fndKCCYx8criY+sayoVqj9F4zswYMsXAFe5njf279LTtGZ+NDRw2eiO1bzayfpZdNezl5qFXmmARAAxmeyo+5xuw9Gr4v+/BpvuTPur3h2ICYFoGE726OCaECTESVUeQO93eALqERdONwUzj5RZVQjCJsVoEQkCJfeIJgdl4zmwvAUDjSjaefTcVYpDKP18OvQeO7aZ69sNdF0DPPvjwn2HhxSPeYvrwE2MEMMzsXiHg0p/D7DPg0c/B2y/EPjJcQ2aMwGzXMMEtAn+qRWDDGyyOIPdEwHQLLYmOah0v95ASgkmM02ah3GVD0wo8i+AoqHTbOfeEWs46vibpfefsk/hw8OvRAPLFx04MOt+Cuy4E3xG47lGYe3ZOt8UOwoQ8ehihutZqh6v+CFXzYM010LEDyGYRTGwhMH/O0oRgsZQwOEoXx6E+H+v29/Dsjg6e3HIIX3AcXYPHEDNQvKTBEILD45Q5NPEeFRV54/pTZ3N2ykE7UdA0wV3Xn5T2fq3HyZtyLs+cdAcXrfu0IQbX/xUqZhZuM20b4Y+Xg7DA9U9A/ZKcb/WmWAQlUUEY8SB0VRhWx2/eBfd+EP1jT9PvCyUEi80YwcR2s5gWUZkz2TXmDYRzTls+1OfjtFv/j0Qj4ruXLebaU2bnda8TEWURKArO3JpSzj2hbry3MSqqSu0IATvEfEjIJiqYZbD/Zfj9JWBzw8eeHJUIQIYYgekayuUAr5hpFJwNdSP/dCVO6Y+5hEqLJGsoNUZgWkSjCRhva+tHl3DzJYt4+LOnUua0svPwQP43e5Tc+cJeLvvFvwqytmkRzK8txW7Rxq3NhBICxYTCZtFoKHexqaUXGpsNMfD3FUYMdj0D91wOpXWGCFTNG/US5hOxJ+UgzDn/v2E5fPB3aB1b+Lnt51Q4DDeeRRN4imAmQWqM4GhaUZtdN9+3opHmmZXMqy0d106cqbywq5NNLX0Fcdsc7g9QVWLHbtW
MwUwqWKxQGLx/RSMvvNVJW6/PEINrCyAGWx+B+66G6uMMEShvOqplzIPQ7K9kPhmPKoVywfm0n/6fnGfZwDt2fj9WY1DmmvgzCQb8YYSINxo03UGjqSXY0+mlutROhdvIJJtfU8rujsH8b/Yo2dFuWCebDvbmfe3OAX+s2WJt2fjN7FZCoJhwXLlqBrqEB9dFx1vmWwzW3wMPfsxIWf3I41BSfdRLeQNGWwiz06spCFnTR7NwYM5V/Cp8CbP23gcv/RQwnrInvEUQ7bMkRPLsi9FUF+/u8DKvpjT2en5tKV3eAH1D4/+zd3sDsYrfNwvQJrpjIBDLnKv1OJRrSKEwmVnl5rT5Vdz/xsF4GmKCGETufi93/+15/rHt8OgXf+U2eOzzRovoax8xgrZjwBuIt5eAeKruaFtD9PvDfD98Fb1zL4Vnvg1bHopaBON/GA5Hvz+UNO9iVDESQErJns5B5tXGhcAUhd2d4+8e2hm1BjQBm1ryLwSH+/3UmhaBxxkLHh9rlBAoJiRXnTST1l4fL+3pir3XWbaYu+f9FG9vN+967ePc+dizuQ+6lxKe/W946uuw6DJYvQbs7jHvc8AfjqWOgpENlbHf0Aj0+UJINPou+AnMPBUe+TQr2T7hs4a8CS2oISFYnKMQdg8G6fOF0iwCMFxG443pFjr7+Fo2t/Tm/t9bDkR0SZc3SG2ZKQQO+nwh/KFjnzqrhEAxIblgcR0VbhtrXje6kr68u4t3//h5vrvezp1z/pcam58fDX2Tvbu2j7yYrsOTNxkTw5ZfAx+4y8jlzwOpFgGY/XZG9yRvPvmXezzGrIOKWXzh8LeoGNyXl30WioGEFtRgWERC5B4sNoPC8xMsgqZKF3aLxp4JEDDe2T7AtBI75y2spXcoREuPL29rdw8GiOgy7hqKCsJ4NJ9TQqCYkDisFi5f0cTT29r5xf/t4tq7Xqe61MFTXz6Tr11/NUOrH6JMDFLz0OVGRXA2ImHDFfTar+CdnzWqei35K59JfSKGYRrPDUN/LA3TCu5p8OE/IzULPwjeAt7OvO0336QKoaYJSu25W0TmU/+8mpLYe1aLxpzqkgmRObTj8ADH13lY1mS4EDe15C9gbLaTiLmGooIwHgFjJQSKCctVJ80gFJH88Om3OOf4Wh757KkcV+cBYNr8d3DLtP9GC/YbRWeZxCAcgAc/ChvvhbNvggv+C7T8/iefOJTGpMQcVzkK+n3GOmbQmWlzeHTRT6iSvcg/XQXBoXxtOa8M+ENphWNGv6HcLKLdHV5cNkvaNLv5taXj7hrSdcmuwwMcX+9hQZ0Hu0XjzTzGCcwn/xpPPFhsvH/sA8ZKCBQTluPrPXz0tNl87fwF3HHtyrQDZ/6yM7jafxO6P4MYBAeN9NDtj8EF/w1n35ixjfRYMQa3JwuBx2HFO8psHyPomrzOUM0yvhj6PLSth4c+AfrEa7uQSQhH04p6T+cgc2tK0tqgzKsp4cCRoXHxl5sc7BliKBjhhHoPdqvGwoayMVkEoYjOszs6YnEGsy6hriweLAZlESgUaXz7ksV8/tzjMvZLeveiOrbIuTzRfDsEEsTA1wv3XI7c+xw73nErnPLZgu3PG0gOFsPR9eQ3htIkC12Zy8Y/9FX0nvVd2Pk3I9A9wej3h9MEbDRzi/d0eJPiAybzakvRJezvHtkS+uFTO3luZ8ew13zjkTf50dM7c9qTiRkoPiHaGXRpYzlbWvuPuqHeXf96m4/+7g2eje61I2YRGEJQVWLHoolx6UCqhEBRtMyvLWVuTQlrDk4zGsWZYvD7i5Gt6/iK/mXe++Is1u3vKcj3l1JmDhYfRU/+xD5DJubrtuOvg1M+D6/dDq/8cmybziOBcIRgWM8QI7HlFCweCoZp7fUlZQyZxFJIR4gTbG3r4xfP7ubWv+/ImtHzdtcg9752gDtf3DuquowdhwYQAhbUGXtZ2lSONxBmb9foi90C4Qh3vfQ2EK+POdzvp8Jtw2E1ak80TVB
dah+XxnNKCBRFzfmL6nl1bzd9lYtjYiC7dvM129d50XYa08udfPG+DfTlLlqFWQAAEKdJREFUKR9/wB/ib5sPoesSf0gnostYNbFJqcN6VHUE6RaB2W8oDO/+Liy81LAKtj06th8iT6S2lzAxhtOM/Pve22kcqNmEQIiRheCPrxruwB3tA1kLvv7wyj6EAH9I57GNbSPuy2Tn4X5mTnPjjtaGLI0GjDcfhXvo0Y1tHO4PsLSpnGe2ddAzGDSKyTzOpOtqPU7lGlIoRsu7F9UR1qXhGmhYTuiTz/PVaT/n8cGF3HHdSn6+egWH+/3c+NDmjE+MuzsG+NvmQzl9Lykl/99Dm/ncn9bz23+9zUAgeTqZSanDymAgPKqc8/6EecUm8Q6kISPIffkd0HQSPHwDHHw957ULRbzzarKAeXJ0jZnB4EyuIZfdQmOFa9iAcZ8vxF82tPGeE+txWDUeWHsw7ZrBQJgH17ZwydIGFk4v4/430q/Jxo52I2PIZH5tKS6bhc0jBIx1XbK/ezDp9a+f38Oi6WXcevlSghGdxza10TEQiKWMmtSNU5sJJQSKombFjAqqSx188y9bOPeHz3H2nXt5+ICbWy8/keaZlayYWcnXLjiev29p5/cv70u699W93bz/tpf53J/W5yQGf918iCfebKfW4+AHT+1kfdTllBYjcFrRJfhGEehMrdAFYmMrY9XFNhesvg88041AePc4TXGLMpDFIih15DalbE+HF03A7OrMhX3zaoZvPvfQuhZ8oQifPXs+Fy2p59GNbWnB5Yc3tDIQCPORU2dz1aom3mztY2vbyJk//lCEfV2DnFAfFwKLJljSWDaiRXDLX7dx1g+e47+f2E5El/xzRwd7Ogf51FlzWdRQxuKGMh5c10JHf7zPkEmNx6myhhSK0aJpgu9cuph3L6xjcWM5y2aU8733LeHy5ngTuRvOmMtZC2q4+fFtXPvb19jS2sdTW9u57q7XqSt3srSpnBsf3szBI9kDkx0Dfv7j0S0sm1HBX79wOmUuKzc+/CZAxqwZyL3Ngq4bsYY011CmmQQl1XDNQ0al9L1XwGA3ezq9tPflfnis23+Ep7a253x9NgZShvKYeJw2hoIRIiMEVXd3epk5zR3zkacyv7aUvV1edF3SMxjk/z24iUc3tgKGdfbHV/ezYmYFSxrLufKkGQz4w/x9S1zQpZT84eV9nNhYTvPMCt63ohG7VeOBHKyC3R1edBkPFJs0z6xkc0sfr+3tznjfP7Yd5ncv72NBXSm/fmEv19/9Or94djdNlS7ee+J0AK5YaQjSoT5/8oQ+jBTS7sEg4UiGmdcFpKBCIIS4UAixUwixWwhxY4bPHUKI+6OfvyaEmF3I/SgmJ+9dOp3/vWo5P1+9gl9+eCXXvHNW0ueaJrjjupV8870L2dLax8U//xef+eM6Fk0v48+fOoXbPtQMwBfu20Aow/8BpZR8/eEtDAUj/OiDS6ktc/I/H1hK71B21xDk1m9o1+EBPnL360gJMyqTc+nNdTcc6CEQTnjSrZpntMjoa6Xzzsu57CfPcO6PnuP3L+8bMaPlvtcPcOWvX+VT96zjO49vHdOBY9YKZKojgJGFcE/HYMb4gMm8mlL8IZ2X9nTx/l++xANrW/jSmo188y9v8uzODvZ2DXJt9N/6nXOqmDnNneT6eWVPN7s6vFx3yiyEEFS47Vy0pJ5HNrRmTEs1g9cRXcYyho5PsAgAPn3WPGZVufnE79eyra0/6bP2Pj///uAmFjeU8fgXTud/PnAir+09wqaDvXzyjLmxGpHLljdisxhZcLUpFkFtmQMpocsbHPZ3l28KNqFMCGEBbgPeDbQAbwghHpNSbku47ONAj5RyvhDiauB/gOGnhSsUR4HDauETZ8zlypNmcOcLeznc7+fmSxfjtlupLLFz6+VL+dyf1vPNR7Zwzgk1OKwWhoIR1h/oYe2+I2xq6eMb71nI/FrjYDhvYR1XnzSDNW8cpMKdHiyGaFa
Iy4Y/rNNyZIj93UMc7BkiFDEO6y5vgEc2tFJit/Ctixfx/hWNSetYNBH7Hlvb+vn2JYs4+/haAAINq3iw8Rus3v9t7ir7DbdVf5NvP7aVv20+xCXLpnPgyBD7uocodVg5fX41p82v5q6X3uaOF/Zy1oIa5lSXcPdL+9jd4eUXH2qOuaFGYjAQ5nC/n7ZeP8+/ZVQ8p9VRRF+/3T3ITOnGGj30pDRKOdw2C0II3u4aTBtVmogZO7j+7jcod9m4/4Z38n87Ovj1C3tZ8/pBppXYeU/0KVvTBFeuauKHT7/FpoO97Ose5I4X9lLptnHJsobYmletmsGjG9t4cF0Lp8+vxh+O8GaLYSG+sKuLYFjHqgmcNgsOq8bsqpKkPVWW2PnDx9/BFb96mevuep2HP3MqM6vcBMM6X75/A8Gwzs9Xr8BhtXDVSTNZUOfh8U2HuHLVjNga00rsnHdCHU9ubc9gERivW3qGqPE4sByjMbMin02UkhYW4hTgZinlBdHXNwFIKf874Zqnote8IoSwAu1AjRxmU6tWrZJr164tyJ4VU5v/+MsW7nk1uULZYdVY1lTBmQuq+czZ85P+j+kPRXj+rU7OX1QXa8MM8Ma+I3zw9lcyfg9NEHsytGqC961o5KvvXkBVqSPj9QDPv9XJdx7byt6uQdx2C3arhq5L+v1h7jr+Nc7d/1NkxUwGwha6vQF0XSKEwGYRRHRJRJcI4jMOqkpsCIwn+iODxpOnhnFIC4j+j/GXeR9IdB0kyf/XtAioK3OiJfz8vlCE7sHhn2gFAomkwu2g1JHZNRSR0Nrrx6YJajwOrNHf/VAowpHBIB6njXKnFaJ7iuiSQ33xXkAWAeVOazTrx7hGSkmnN5D0OxFINCFwWjWs0d+ZrutYNQ23Peo0STqSJGFdGhaRJPqTGH+77RbsFi32/eK/Lpm0TljX8YciuOwWLAnr6lISiFor5v5sFg2L+et9z/dh5fXD/m6zIYRYJ6VclfGzAgrBFcCFUspPRF9fC7xDSvn5hGu2RK9pib7eE72mK2WtG4AbAGbOnLly//5hessoFEeJlJIDR4YYDEQIhCPYLJrRWsA6Og9qOKKz5o2D+EPGGnarRmOFi9lVJTRUOONtJEZBMKxz/9qD7O8aJBjRCUV0zjuhjnctrDXqCw6+BhjVq2EdnDYLApAI+v0hOgcCOO1WmipcSRXWR4ZCHOrzG4IhjXiFJH5+SQk6oCFw2Cw4bRacNg2X3UqJ3YrLbkkSATAO5NZeH+FIBF2CnnLGhCOSUERHSsnC6Z6sMQKkpNcXosRhwZbSGkSXOkIIREy1jL93d3jxh3WmV7iodNuje0tQNqDXF6bLG8SqCTRNo9RhobLEERXBxJ8lee3Y6+h7RwaDvNXhxapp2Cwa5W4bs6pK0vaUbR1fKIzLnmyJSQQ7///27j7GjqqM4/j3l1YKVKVA1WjbuG2yUpEESqRpEU2DRHlprDEQqiY2kQQlvGk0pGpion9hNL4lhIQAgqRWY0HcEEI15aUBQl8tpbUgBQyslrYGqPWVtj7+cc4107v3bmF7pxPn/D7JzZ0zOzv3nDx359k5M3PO7v38M9+afChgzvSp+aKyYO5imDV2ru83oqlEcBnw8a5EMD8irq1ssz1vU00E8yOi95UYfEZgZjYR4yWCOi8WjwKzKuWZQPfTHP/bJncNnQS8UmOdzMysS52JYAMwLGm2pOOApcBI1zYjwLK8fCnw4HjXB8zMbPBqu2soIg5KugZYDUwCbo+I7ZK+DWyMiBHgNuAuSTtJZwJL66qPmZn1VlsiAIiI+4H7u9Z9s7L8L+CyOutgZmbj85PFZmaFcyIwMyucE4GZWeGcCMzMClfbA2V1kbQXmOijxdOBvxxxq3Zxm8vgNpfhaNr83ojoObjT/10iOBqSNvZ7sq6t3OYyuM1lqKvN7hoyMyucE4GZWeFKSwS3NF2BBrjNZXCby1BLm4u6RmBmZmOVdkZgZmZdnAjMzApXTCK
QdKGkZyTtlLS86frUQdIsSQ9J2iFpu6Tr8/pTJP1W0rP5/eSm6zpIkiZJ+p2k+3J5tqR1ub2/yMOgt4akaZJWSXo6x3phATH+cv5Ob5O0UtLxbYuzpNsl7ckzN3bW9Yyrkh/n49lWSWcfzWcXkQgkTQJuAi4CTgc+Len0ZmtVi4PAVyLi/cAC4OrczuXAmogYBtbkcptcD+yolL8D/CC391XgikZqVZ8fAQ9ExFzgTFLbWxtjSTOA64APRsQZpGHtl9K+ON8BXNi1rl9cLwKG8+tK4Oaj+eAiEgEwH9gZEc9HxOvAz4ElDddp4CJiV0Rszsv7SQeIGaS23pk3uxP4ZDM1HDxJM4FLgFtzWcD5wKq8Sdva+3bgI6S5PIiI1yPiNVoc42wycEKeyfBEYBcti3NErGXsDI394roE+GkkTwDTJL17op9dSiKYAbxUKY/mda0laQiYB6wD3hURuyAlC+CdzdVs4H4I3ECaYx3gVOC1iDiYy22L9RxgL/CT3B12q6SptDjGEfEn4HvAi6QEsA/YRLvj3NEvrgM9ppWSCNRjXWvvm5X0VuBu4EsR8dem61MXSYuBPRGxqbq6x6ZtivVk4Gzg5oiYB/ydFnUD9ZL7xZcAs4H3AFNJXSPd2hTnIxno97yURDAKzKqUZwJ/bqgutZL0FlISWBER9+TVuzunjfl9T1P1G7APAZ+Q9EdSd9/5pDOEabkLAdoX61FgNCLW5fIqUmJoa4wBLgBeiIi9EXEAuAc4l3bHuaNfXAd6TCslEWwAhvNdBseRLjSNNFyngcv947cBOyLi+5UfjQDL8vIy4NfHum51iIivRcTMiBgixfTBiPgs8BBwad6sNe0FiIiXgZcknZZXfRT4PS2NcfYisEDSifk73mlza+Nc0S+uI8Dn8t1DC4B9nS6kCYmIIl7AxcAfgOeAbzRdn5raeB7p9HArsCW/Lib1m68Bns3vpzRd1xravgi4Ly/PAdYDO4FfAlOart+A23oWsDHH+V7g5LbHGPgW8DSwDbgLmNK2OAMrSddADpD+47+iX1xJXUM35ePZU6Q7qib82R5iwsyscKV0DZmZWR9OBGZmhXMiMDMrnBOBmVnhnAjMzArnRGDFkvS3/D4k6TMD3vfXu8qPD3L/ZoPkRGAGQ8CbSgR5RNvxHJYIIuLcN1kns2PGicAMbgQ+LGlLHvd+kqTvStqQx3r/AoCkRXm+h5+RHuJB0r2SNuWx8q/M624kjZS5RdKKvK5z9qG8722SnpJ0eWXfD1fmGViRn6I1q93kI29i1nrLga9GxGKAfEDfFxHnSJoCPCbpN3nb+cAZEfFCLn8+Il6RdAKwQdLdEbFc0jURcVaPz/oU6cngM4Hp+XfW5p/NAz5AGjPmMdJYSo8Ovrlmh/MZgdlYHyON47KFNIz3qaQJQADWV5IAwHWSngSeIA0CNsz4zgNWRsShiNgNPAKcU9n3aET8hzQ8yNBAWmN2BD4jMBtLwLURsfqwldIi0rDP1fIFwMKI+Iekh4Hj38C++/l3ZfkQ/vu0Y8RnBGawH3hbpbwauCoP6Y2k9+XJX7qdBLyak8Bc0vSgHQc6v99lLXB5vg7xDtJsY+sH0gqzCfJ/HGZpFM+DuYvnDtKcwEPA5nzBdi+9p0F8APiipK3AM6TuoY5bgK2SNkcaGrvjV8BC4EnSSLE3RMTLOZGYNcKjj5qZFc5dQ2ZmhXMiMDMrnBOBmVnhnAjMzArnRGBmVjgnAjOzwjkRmJkV7r+zyENrAv27tAAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "\"\"\"\n", "# @file name : ce_loss.py\n", "# @author : TingsongYu https://github.com/TingsongYu\n", "# @date : 2019-10-07 10:08:00\n", "# @brief : 人民币分类模型训练\n", "\"\"\"\n", "import os\n", "import numpy as np\n", "import torch\n", "import torch.nn as nn\n", "from torch.utils.data import DataLoader\n", "import torchvision.transforms as transforms\n", "import torch.optim as optim\n", "from PIL import Image\n", "from matplotlib import pyplot as plt\n", "\n", "from utils.lenet import LeNet\n", "from utils.my_dataset import RMBDataset\n", "from utils.common_tools import transform_invert, set_seed\n", "\n", "\n", "set_seed(1) # 设置随机种子\n", "rmb_label = {\"1\": 0, \"100\": 1}\n", "\n", "# 参数设置\n", "MAX_EPOCH = 10\n", "BATCH_SIZE = 16\n", "LR = 0.01\n", "log_interval = 10\n", "val_interval = 1\n", "\n", "# ============================ step 1/5 数据 ============================\n", "\n", "split_dir = os.path.abspath(os.path.join(\"data\", \"rmb_split\"))\n", "if not os.path.exists(split_dir):\n", " raise Exception(r\"数据 {} 不存在, 回到lesson-06\\1_split_dataset.py生成数据\".format(split_dir))\n", "train_dir = os.path.join(split_dir, \"train\")\n", "valid_dir = os.path.join(split_dir, \"valid\")\n", "\n", "norm_mean = [0.485, 0.456, 0.406]\n", "norm_std = [0.229, 0.224, 0.225]\n", "\n", "train_transform = transforms.Compose([\n", " transforms.Resize((32, 32)),\n", " transforms.RandomCrop(32, padding=4),\n", " transforms.RandomGrayscale(p=0.8),\n", " transforms.ToTensor(),\n", " transforms.Normalize(norm_mean, norm_std),\n", "])\n", "\n", "valid_transform = transforms.Compose([\n", " transforms.Resize((32, 32)),\n", " transforms.ToTensor(),\n", " transforms.Normalize(norm_mean, norm_std),\n", "])\n", "\n", "# 构建MyDataset实例\n", "train_data = RMBDataset(data_dir=train_dir, transform=train_transform)\n", "valid_data = RMBDataset(data_dir=valid_dir, transform=valid_transform)\n", "\n", "# 
构建DataLoder\n", "train_loader = DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)\n", "valid_loader = DataLoader(dataset=valid_data, batch_size=BATCH_SIZE)\n", "\n", "# ============================ step 2/5 模型 ============================\n", "\n", "net = LeNet(classes=2)\n", "net.initialize_weights()\n", "\n", "# ============================ step 3/5 损失函数 ============================\n", "loss_functoin = nn.CrossEntropyLoss() # 选择损失函数\n", "\n", "# ============================ step 4/5 优化器 ============================\n", "optimizer = optim.SGD(net.parameters(), lr=LR, momentum=0.9) # 选择优化器\n", "scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1) # 设置学习率下降策略\n", "\n", "# ============================ step 5/5 训练 ============================\n", "train_curve = list()\n", "valid_curve = list()\n", "\n", "for epoch in range(MAX_EPOCH):\n", "\n", " loss_mean = 0.\n", " correct = 0.\n", " total = 0.\n", "\n", " net.train()\n", " for i, data in enumerate(train_loader):\n", "\n", " # forward\n", " inputs, labels = data\n", " outputs = net(inputs)\n", "\n", " # backward\n", " optimizer.zero_grad()\n", " loss = loss_functoin(outputs, labels)\n", " loss.backward()\n", "\n", " # update weights\n", " optimizer.step()\n", "\n", " # 统计分类情况\n", " _, predicted = torch.max(outputs.data, 1)\n", " total += labels.size(0)\n", " correct += (predicted == labels).squeeze().sum().numpy()\n", "\n", " # 打印训练信息\n", " loss_mean += loss.item()\n", " train_curve.append(loss.item())\n", " if (i+1) % log_interval == 0:\n", " loss_mean = loss_mean / log_interval\n", " print(\"Training:Epoch[{:0>3}/{:0>3}] Iteration[{:0>3}/{:0>3}] Loss: {:.4f} Acc:{:.2%}\".format(\n", " epoch, MAX_EPOCH, i+1, len(train_loader), loss_mean, correct / total))\n", " loss_mean = 0.\n", "\n", " scheduler.step() # 更新学习率\n", "\n", " # validate the model\n", " if (epoch+1) % val_interval == 0:\n", "\n", " correct_val = 0.\n", " total_val = 0.\n", " loss_val = 0.\n", " 
net.eval()\n", " with torch.no_grad():\n", " for j, data in enumerate(valid_loader):\n", " inputs, labels = data\n", " outputs = net(inputs)\n", " loss = loss_functoin(outputs, labels)\n", "\n", " _, predicted = torch.max(outputs.data, 1)\n", " total_val += labels.size(0)\n", " correct_val += (predicted == labels).squeeze().sum().numpy()\n", "\n", " loss_val += loss.item()\n", "\n", " valid_curve.append(loss_val)\n", " # fix: report validation accuracy (correct_val / total_val); this previously printed the training accuracy\n", " print(\"Valid:\\t Epoch[{:0>3}/{:0>3}] Iteration[{:0>3}/{:0>3}] Loss: {:.4f} Acc:{:.2%}\".format(\n", " epoch, MAX_EPOCH, j+1, len(valid_loader), loss_val, correct_val / total_val))\n", "\n", "\n", "train_x = range(len(train_curve))\n", "train_y = train_curve\n", "\n", "train_iters = len(train_loader)\n", "valid_x = np.arange(1, len(valid_curve)+1) * train_iters*val_interval # 由于valid中记录的是epochloss,需要对记录点进行转换到iterations\n", "valid_y = valid_curve\n", "\n", "plt.plot(train_x, train_y, label='Train')\n", "plt.plot(valid_x, valid_y, label='Valid')\n", "\n", "plt.legend(loc='upper right')\n", "plt.ylabel('loss value')\n", "plt.xlabel('Iteration')\n", "plt.show()\n", "\n", "# ============================ inference ============================\n", "\n", "test_dir = \"test_data\"\n", "\n", "test_data = RMBDataset(data_dir=test_dir, transform=valid_transform)\n", "# distinct name so the validation DataLoader defined above is not shadowed\n", "test_loader = DataLoader(dataset=test_data, batch_size=1)\n", "\n", "for i, data in enumerate(test_loader):\n", " # forward\n", " inputs, labels = data\n", " outputs = net(inputs)\n", " _, predicted = torch.max(outputs.data, 1)\n", "\n", " rmb = 1 if predicted.numpy()[0] == 0 else 100\n", "\n", " img_tensor = inputs[0, ...] # C H W\n", " img = transform_invert(img_tensor, train_transform)\n", " plt.imshow(img)\n", " plt.title(\"LeNet got {} Yuan\".format(rmb))\n", " plt.show()\n", " plt.pause(0.5)\n", " plt.close()\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:交叉熵代码演示?" 
] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Cross Entropy Loss:\n", " tensor([1.3133, 0.1269, 0.1269]) tensor(1.5671) tensor(0.5224)\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "import numpy as np\n", "\n", "# fake data\n", "inputs = torch.tensor([[1, 2], [1, 3], [1, 3]], dtype=torch.float)\n", "target = torch.tensor([0, 1, 1], dtype=torch.long)\n", "\n", "# ----------------------------------- CrossEntropy loss: reduction -----------------------------------\n", "\n", "# def loss function\n", "loss_f_none = nn.CrossEntropyLoss(weight=None, reduction='none')\n", "loss_f_sum = nn.CrossEntropyLoss(weight=None, reduction='sum')\n", "loss_f_mean = nn.CrossEntropyLoss(weight=None, reduction='mean')\n", "\n", "# forward\n", "loss_none = loss_f_none(inputs, target)\n", "loss_sum = loss_f_sum(inputs, target)\n", "loss_mean = loss_f_mean(inputs, target)\n", "\n", "# view\n", "print(\"Cross Entropy Loss:\\n \", loss_none, loss_sum, loss_mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:通过手算校验pytorch的交叉熵代码的正确性代码?" 
] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "第一个样本loss为: 1.3132617\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "import numpy as np\n", "\n", "# fake data\n", "inputs = torch.tensor([[1, 2], [1, 3], [1, 3]], dtype=torch.float)\n", "target = torch.tensor([0, 1, 1], dtype=torch.long)\n", "\n", "# --------------------------------- compute by hand\n", "\n", "idx = 0\n", "\n", "input_1 = inputs.detach().numpy()[idx] # [1, 2]\n", "target_1 = target.numpy()[idx] # [0]\n", "\n", "# 第一项\n", "x_class = input_1[target_1]\n", "\n", "# 第二项\n", "sigma_exp_x = np.sum(list(map(np.exp, input_1)))\n", "log_sigma_exp_x = np.log(sigma_exp_x)\n", "\n", "# 输出loss\n", "loss_1 = -x_class + log_sigma_exp_x\n", "\n", "print(\"第一个样本loss为: \", loss_1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:交叉熵的weight作用代码示例?" ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "weights: tensor([1., 2.])\n", "tensor([1.3133, 0.2539, 0.2539]) tensor(1.8210) tensor(0.3642)\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "import numpy as np\n", "\n", "# fake data\n", "inputs = torch.tensor([[1, 2], [1, 3], [1, 3]], dtype=torch.float)\n", "target = torch.tensor([0, 1, 1], dtype=torch.long)\n", "\n", "# ----------------------------------- weight -----------------------------------\n", "# def loss function\n", "weights = torch.tensor([1, 2], dtype=torch.float)\n", "# weights = torch.tensor([0.7, 0.3], dtype=torch.float)\n", "\n", "loss_f_none_w = nn.CrossEntropyLoss(weight=weights, reduction='none')\n", "loss_f_sum = nn.CrossEntropyLoss(weight=weights, reduction='sum')\n", "loss_f_mean = nn.CrossEntropyLoss(weight=weights, reduction='mean')\n", "\n", "# forward\n", "loss_none_w = loss_f_none_w(inputs, 
target)\n", "loss_sum = loss_f_sum(inputs, target)\n", "loss_mean = loss_f_mean(inputs, target)\n", "\n", "# view\n", "print(\"weights: \", weights)\n", "print(loss_none_w, loss_sum, loss_mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:交叉熵的weight作用手算校验代码示例?" ] }, { "cell_type": "code", "execution_count": 23, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0.3641947731375694\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "import numpy as np\n", "\n", "# fake data\n", "inputs = torch.tensor([[1, 2], [1, 3], [1, 3]], dtype=torch.float)\n", "target = torch.tensor([0, 1, 1], dtype=torch.long)\n", "\n", "weights = torch.tensor([1, 2], dtype=torch.float)\n", "weights_all = np.sum(list(map(lambda x: weights.numpy()[x], target.numpy()))) # [0, 1, 1] # [1 2 2]\n", "\n", "# recompute the unweighted per-sample losses here so this cell runs on a fresh kernel\n", "# (previously relied on loss_none leaking from an earlier cell -> NameError on Restart & Run All)\n", "loss_f_none = nn.CrossEntropyLoss(weight=None, reduction='none')\n", "loss_none = loss_f_none(inputs, target)\n", "\n", "mean = 0\n", "loss_sep = loss_none.detach().numpy()\n", "for i in range(target.shape[0]):\n", "\n", " x_class = target.numpy()[i]\n", " tmp = loss_sep[i] * (weights.numpy()[x_class] / weights_all)\n", " mean += tmp\n", "\n", "print(mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:如何实现负对数似然函数中的负号功能?\n", "- $$\\ell(x, y)=L=\\left\\{l_{1}, \\ldots, l_{N}\\right\\}^{\\prime}, \\quad l_{n}=-w_{y_{n}} x_{n, y_{n}}$$\n", "- `torch.nn.NLLLoss(weight: Optional[torch.Tensor] = None, size_average=None, ignore_index: int = -100, reduce=None, reduction: str = 'mean')`\n", "- weight:各类别的loss设置权值\n", "- ignore_index:忽略某个类别\n", "- reduction:计算模式,可为none/sum/mean\n", " - none:逐个元素计算\n", " - sum:所有元素求和,返回标量\n", " - mean:加权平均,返回标量\n", "\n", "Q:NLLLoss代码示例?" ] }, { "cell_type": "code", "execution_count": 25, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "weights: tensor([1., 1.])\n", "NLL Loss tensor([-1., -3., -3.]) tensor(-7.) 
tensor(-2.3333)\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "import numpy as np\n", "\n", "# fake data\n", "inputs = torch.tensor([[1, 2], [1, 3], [1, 3]], dtype=torch.float)\n", "target = torch.tensor([0, 1, 1], dtype=torch.long)\n", "\n", "# ----------------------------------- 2 NLLLoss -----------------------------------\n", "weights = torch.tensor([1, 1], dtype=torch.float)\n", "\n", "loss_f_none_w = nn.NLLLoss(weight=weights, reduction='none')\n", "loss_f_sum = nn.NLLLoss(weight=weights, reduction='sum')\n", "loss_f_mean = nn.NLLLoss(weight=weights, reduction='mean')\n", "\n", "# forward\n", "loss_none_w = loss_f_none_w(inputs, target)\n", "loss_sum = loss_f_sum(inputs, target)\n", "loss_mean = loss_f_mean(inputs, target)\n", "\n", "# view\n", "print(\"weights: \", weights)\n", "print(\"NLL Loss\", loss_none_w, loss_sum, loss_mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:二分类交叉熵是怎样?\n", "- $$l_{n}=-w_{n}\\left[y_{n} \\cdot \\log x_{n}+\\left(1-y_{n}\\right) \\cdot \\log \\left(1-x_{n}\\right)\\right]$$\n", "- `torch.nn.BCELoss(weight: Optional[torch.Tensor] = None, size_average=None, reduce=None, reduction: str = 'mean')`\n", "- weight:各类别的loss设置权值\n", "- ignore_index:忽略某个类别\n", "- reduction:计算模式,可为none/sum/mean\n", " - none:逐个元素计算\n", " - sum:所有元素求和,返回标量\n", " - mean:加权平均,返回标量\n", "- 注意:输入值取值在[0,1]\n", "\n", "Q:BCELoss代码示例?" 
] }, { "cell_type": "code", "execution_count": 27, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "weights: tensor([1., 1.])\n", "BCE Loss tensor([[0.3133, 2.1269],\n", " [0.1269, 2.1269],\n", " [3.0486, 0.0181],\n", " [4.0181, 0.0067]]) tensor(11.7856) tensor(1.4732)\n" ] } ], "source": [ "inputs = torch.tensor([[1, 2], [2, 2], [3, 4], [4, 5]], dtype=torch.float)\n", "target = torch.tensor([[1, 0], [1, 0], [0, 1], [0, 1]], dtype=torch.float)\n", "\n", "target_bce = target\n", "\n", "# itarget\n", "inputs = torch.sigmoid(inputs)\n", "\n", "weights = torch.tensor([1, 1], dtype=torch.float)\n", "\n", "loss_f_none_w = nn.BCELoss(weight=weights, reduction='none')\n", "loss_f_sum = nn.BCELoss(weight=weights, reduction='sum')\n", "loss_f_mean = nn.BCELoss(weight=weights, reduction='mean')\n", "\n", "# forward\n", "loss_none_w = loss_f_none_w(inputs, target_bce)\n", "loss_sum = loss_f_sum(inputs, target_bce)\n", "loss_mean = loss_f_mean(inputs, target_bce)\n", "\n", "# view\n", "print(\"weights: \", weights)\n", "print(\"BCE Loss\", loss_none_w, loss_sum, loss_mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:BCELoss手算校验代码示例?" 
] }, { "cell_type": "code", "execution_count": 31, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "BCE inputs: tensor([[0.7311, 0.8808],\n", " [0.8808, 0.8808],\n", " [0.9526, 0.9820],\n", " [0.9820, 0.9933]])\n", "第一个loss为: 0.31326166\n" ] } ], "source": [ "inputs = torch.tensor([[1, 2], [2, 2], [3, 4], [4, 5]], dtype=torch.float)\n", "target = torch.tensor([[1, 0], [1, 0], [0, 1], [0, 1]], dtype=torch.float)\n", "\n", "inputs = torch.sigmoid(inputs)\n", "\n", "idx = 0\n", "\n", "x_i = inputs.detach().numpy()[idx, idx]\n", "y_i = target.numpy()[idx, idx] #\n", "\n", "# loss\n", "# l_i = -[ y_i * np.log(x_i) + (1-y_i) * np.log(1-y_i) ] # np.log(0) = nan\n", "l_i = -y_i * np.log(x_i) if y_i else -(1-y_i) * np.log(1-x_i)\n", "\n", "# 输出loss\n", "print(\"BCE inputs: \", inputs)\n", "print(\"第一个loss为: \", l_i)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:结合Sigmoid与二分类交叉熵\n", "- $$l_{n}=-w_{n}\\left[y_{n} \\cdot \\log \\sigma\\left(x_{n}\\right)+\\left(1-y_{n}\\right) \\cdot \\log \\left(1-\\sigma\\left(x_{n}\\right)\\right)\\right]$$\n", "- `torch.nn.BCEWithLogitsLoss(weight: Optional[torch.Tensor] = None, size_average=None, reduce=None, reduction: str = 'mean', pos_weight: Optional[torch.Tensor] = None)`\n", "- pos_weight:正样本的权值,用于均衡正负样本\n", "- weight:各类别的loss设置权值\n", "- ignore_index:忽略某个类别\n", "- reduction:计算模式,可为none/sum/mean\n", " - none:逐个元素计算\n", " - sum:所有元素求和,返回标量\n", " - mean:加权平均,返回标量\n", "- 注意:网络最后不加sigmoid函数\n", "\n", "Q:BCEwithLogitsLoss的代码示例?" 
] }, { "cell_type": "code", "execution_count": 33, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "weights: tensor([1., 1.])\n", "tensor([[0.3133, 2.1269],\n", " [0.1269, 2.1269],\n", " [3.0486, 0.0181],\n", " [4.0181, 0.0067]]) tensor(11.7856) tensor(1.4732)\n" ] } ], "source": [ "inputs = torch.tensor([[1, 2], [2, 2], [3, 4], [4, 5]], dtype=torch.float)\n", "target = torch.tensor([[1, 0], [1, 0], [0, 1], [0, 1]], dtype=torch.float)\n", "\n", "target_bce = target\n", "\n", "# 不需要加sigmoid\n", "# inputs = torch.sigmoid(inputs)\n", "\n", "weights = torch.tensor([1, 1], dtype=torch.float)\n", "\n", "loss_f_none_w = nn.BCEWithLogitsLoss(weight=weights, reduction='none')\n", "loss_f_sum = nn.BCEWithLogitsLoss(weight=weights, reduction='sum')\n", "loss_f_mean = nn.BCEWithLogitsLoss(weight=weights, reduction='mean')\n", "\n", "# forward\n", "loss_none_w = loss_f_none_w(inputs, target_bce)\n", "loss_sum = loss_f_sum(inputs, target_bce)\n", "loss_mean = loss_f_mean(inputs, target_bce)\n", "\n", "# view\n", "print(\"weights: \", weights)\n", "print(loss_none_w, loss_sum, loss_mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:BCEwithLogitsLoss的pos_weight代码示例?" 
] }, { "cell_type": "code", "execution_count": 36, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "pos_weights: tensor([3.])\n", "tensor([[0.9398, 2.1269],\n", " [0.3808, 2.1269],\n", " [3.0486, 0.0544],\n", " [4.0181, 0.0201]]) tensor(12.7158) tensor(1.5895)\n" ] } ], "source": [ "inputs = torch.tensor([[1, 2], [2, 2], [3, 4], [4, 5]], dtype=torch.float)\n", "target = torch.tensor([[1, 0], [1, 0], [0, 1], [0, 1]], dtype=torch.float)\n", "\n", "target_bce = target\n", "\n", "# itarget\n", "# inputs = torch.sigmoid(inputs)\n", "\n", "weights = torch.tensor([1], dtype=torch.float)\n", "pos_w = torch.tensor([3], dtype=torch.float) # 3\n", "\n", "loss_f_none_w = nn.BCEWithLogitsLoss(weight=weights, reduction='none', pos_weight=pos_w)\n", "loss_f_sum = nn.BCEWithLogitsLoss(weight=weights, reduction='sum', pos_weight=pos_w)\n", "loss_f_mean = nn.BCEWithLogitsLoss(weight=weights, reduction='mean', pos_weight=pos_w)\n", "\n", "# forward\n", "loss_none_w = loss_f_none_w(inputs, target_bce)\n", "loss_sum = loss_f_sum(inputs, target_bce)\n", "loss_mean = loss_f_mean(inputs, target_bce)\n", "\n", "# view\n", "print(\"pos_weights: \", pos_w)\n", "print(loss_none_w, loss_sum, loss_mean)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# 3.损失函数(二)\n", "\n", "Q:如何计算inputs与target之差的绝对值?\n", "- $$l_{n}=\\left|x_{n}-y_{n}\\right|$$\n", "- `torch.nn.L1Loss(size_average=None, reduce=None, reduction: str = 'mean')`\n", "\n", "Q:如何计算inputs与target之差的平方?\n", "- $$l_{n}=\\left(x_{n}-y_{n}\\right)^{2}$$\n", "- `torch.nn.MSELoss(size_average=None, reduce=None, reduction: str = 'mean')`\n", "\n", "Q:L1Loss和MSELoss代码示例?" 
] }, { "cell_type": "code", "execution_count": 38, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "input:tensor([[1., 1.],\n", " [1., 1.]])\n", "target:tensor([[3., 3.],\n", " [3., 3.]])\n", "L1 loss:tensor([[2., 2.],\n", " [2., 2.]])\n", "MSE loss:tensor([[4., 4.],\n", " [4., 4.]])\n" ] } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import numpy as np\n", "from utils.common_tools import set_seed\n", "set_seed(1) # 设置随机种子\n", "\n", "inputs = torch.ones((2, 2))\n", "target = torch.ones((2, 2)) * 3\n", "\n", "loss_f = nn.L1Loss(reduction='none')\n", "loss = loss_f(inputs, target)\n", "\n", "print(\"input:{}\\ntarget:{}\\nL1 loss:{}\".format(inputs, target, loss))\n", "\n", "loss_f_mse = nn.MSELoss(reduction='none')\n", "loss_mse = loss_f_mse(inputs, target)\n", "\n", "print(\"MSE loss:{}\".format(loss_mse))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:平滑的L1Loss是怎样?\n", "- $$\\operatorname{loss}(x, y)=\\frac{1}{n} \\sum_{i} z_{i}$$\n", "- $$z_{i}=\\left\\{\\begin{array}{ll}\n", "0.5\\left(x_{i}-y_{i}\\right)^{2}, & \\text { if }\\left|x_{i}-y_{i}\\right|<1 \\\\\n", "\\left|x_{i}-y_{i}\\right|-0.5, & \\text { otherwise }\n", "\\end{array}\\right.$$\n", "- ![](http://anki190912.xuexihaike.com/20201004190650.png?imageView2/2/h/300)\n", "- `torch.nn.SmoothL1Loss(size_average=None, reduce=None, reduction: str = 'mean')`\n", "\n", "Q:SmoothL1Loss代码示例" ] }, { "cell_type": "code", "execution_count": 40, "metadata": {}, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEHCAYAAACjh0HiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOzdd1yV5eP/8dfFVpa4cCG4J4qgpjlxp6a5d2qWaY6yMrVhZVmmLU3TzMpMc++dJu4NgnvgABG3IEPZ1++PG/v684N6gHMxr+fjcR4P4Jxzv6+7Y1ycc9/3+xJSSjRN07T8yyK7B6BpmqZlLz0RaJqm5XN6ItA0Tcvn9ESgaZqWz+mJQNM0LZ+zyu4BpFfRokWlh4dHhp4bGxuLvb29eQeUTfS+5Ex5ZV/yyn6A3pdH/P3970gpi6V1X66bCDw8PDh69GiGnrtz506aN29u3gFlE70vOVNe2Ze8sh+g9+URIUTI0+7THw1pmqblc3oi0DRNy+f0RKBpmpbP6YlA0zQtn9MTgaZpWj6nbCIQQtgJIQ4LIYKEEKeEEJ+n8RhbIcRSIUSwEOKQEMJD1Xg0TdO0tKl8RxAPtJBS1ga8gHZCiAZPPGYIECGlrAj8AHyjcDyapmlaGpRNBNIQk/qtdertyc7rzsCfqV+vAFoKIYSSAcVFUTZkBaSkKNm8pmmaUkd+w+7hTSWbFirXIxBCWAL+QEVglpRy3BP3nwTaSSnDUr+/CLwgpbzzxOOGAkMBXF1dfZYsWZLusbje8KPa2R+57NGXEI9eGdqfnCQmJgYHB4fsHoZZ6H3JefLKfkDe2Jcidw7jeXIyl4q3I7T68Axtw9fX119KWTfNO6WUym9AIcAPqPnEz08BZR77/iJQ5Fnb8vHxkRmSkiKv/9xJyk+dpQz+N2PbyEH8/Pyyewhmo/cl58kr+yFlHtiXu5ek/NpNyjlN5K7tWzO8GeCofMrv1Sw5a0hKGQnsBNo9cVcY4AYghLACnIF7SgYhBOcrD4diVWHFELgfpiRG0zTNbBLjYNmrxtc9F5BiaaMkRuVZQ8WEEIVSvy4AtALOPvGwdcDA1K+7AztSZy4lUiztoNdfkJwIywZCUoKqKE3TtMzbPBZuHIcuc8HFQ1mMyncEJQE/IcRx4AiwTUq5QQgxSQjRKfUxvwFFhBDBwLvAeIXjMRStBJ1nwrWj8M/HyuM0TdMy5NgiCFgATd6DKk9+mGJeytpHpZTHgTpp/HziY1/HAT1UjeGparwCV0fAwVngVh88u2f5EDRN057qxgnY+C6Uawq+HymPy79XFrf+HNwawLrRcPtcdo9G0zTNEHffOC5QwAW6/Q4Wlsoj8+9EYGkNPf4Am4KwdADExzz/OZqmaSpJCWvegshQ6DEfHNJcR8bs8u9EAOBUCrr9BncvwPq3jRdB0zQtu+z/Cc5ugNaToOyTRQzq5O+JAKB8M+MzuJMr4Mi87B6Npmn51ZV9sP0zqN4ZGryVpdF6IgBo/C5UbgdbJkBYxpbB1DRNy7Dom7BiMBQuB51mgqKmnafREwGAhQV0mQNOJY3rC2LvZveINE3LL5KTYMVrEBcFPReAnVOWD0FPBI8UcDFehNhbsOp1SEnO7hFpmpYf7PgCQvbCyz+Ca41sGYKeCB5Xqg68NBUu7oBdU7N7NJqm5XVnN8K+H8FnMNTunW3D0BPBk3wGQe0+sOsbuLA9u0ejaVpede8SrB4OJb2g3ZRsHYqeCJ4kBHT4HopXNz4iirya3SPSNC2vSXwIS181ft/0XADWdtk6HD0RpMWmoFFOl5IMywdCUnx2j0jTtLxk0/tw8wR0/RVc3LN7NHoieKoiFaDzLLjmD1vVd31ompZPBPwFxxZC07FQuU12jwbQE8GzVe8EDUfCkV/h+PLsHo2mabnd9ePGu4HyzaH5hOwezX/0RPA8rT6Dsg1h/Wi49eRyCpqmaSZ6GAnLBkCBwka1TRaUyZlKTwTPY2k
N3f8AGwfjRYyPzu4RaZqW2zwqk7sfBj3/BPui2T2i/4+eCEzhVBK6/w53g43aal1Op2laeuybDuc2QpsvjTVQchg9EZiqXBNo8QmcWgWH52b3aDRNyy2u7IV/P4caXeCFYdk9mjTpiSA9Gr0DlV8yziK6eiS7R6NpWk4XfQOWD4bCFaDTT1leJmcqPRGkh4UFdJltrGOwfCDE3snuEWmallM9KpNLiDGuS7J1zO4RPZWeCNKrgIvxosbegZVDdDmdpmlp+/dzCNkHL0+H4tWyezTPpCeCjChZG9pPg0s7YWf2doRompYDnVkP+2dA3SFQq2d2j+a59ESQUd6vglc/2D0VLmzL7tFompZT3L1onCpayhvafZ3dozGJnggySgho/y24esKqN4zFpjVNy98SHsCyV42LxXr+CVa22T0ik+iJIDNsChovdkqy8eLrcjpNy7+kTC2TOwVd50Ghstk9IpMpmwiEEG5CCD8hxBkhxCkhxNtpPKa5EOK+ECIw9TZR1XiUKVIBXpkN4ceMNY81TcufAhZA4CJo9gFUapXdo0kXK4XbTgLek1IGCCEcAX8hxDYp5eknHrdHStlR4TjUq9YRXhxtHBwq2yBXHBzSNM2MwgNh01io0AKajcvu0aSbsncEUsrrUsqA1K+jgTNAaVV5z3MvNoEV5xNISk5RE9DyU3BvBOvfhltn1GRompbzPIwwPhq2L2p8JKSoTO6XXRcJj1Hz+0vILOjNEUJ4ALuBmlLKqMd+3hxYCYQB4cD7UspTaTx/KDAUwNXV1WfJkiXpHsOh60nMDoqnfTlrelaxycBePJ9N/D3qHh1DkpU9/j7fkmxVUEkOQExMDA4ODsq2n5X0vuQ8eWU/QPG+yBRqnvyKwveOEej1FVHOVZTEPPr91aq0pL9nxvbF19fXX0pZN807pZRKb4AD4A90TeM+J8Ah9ev2wIXnbc/Hx0dm1MCZW6T7uA1y68nrGd7Gc13eI+VnLlIufVXKlBRlMX5+fsq2ndX0vuQ8eWU/pFS8L7u/k/JTJykPzlEWceFmtKz+yWbZ9ed9ctu/OzK8HeCofMrvVaVnDQkhrDH+4l8kpVyVxiQUJaWMSf16E2AthFDWz9q3qg2epZ15b3kQIXdj1YR4NIaWE+H0Gjg0R02GpmnZ7/Ju2PEF1OwG9YcqiYiNT2L4Qn/srC2Z1dcbKws1XUUqzxoSwG/AGSnl9095TInUxyGEqJ86nruqxmRjKfi5nzcWQjBsYQBxiYrqIRq9DVU6wD8fQ+hBNRmapmWfqOtGj1CRivDyDCVlclJKPlx9gou3Y5jRpw4lnNUtcK/yHUEjYADQ4rHTQ9sLIYYJIR51sXYHTgohgoAZQO/UtzDKuBUuyI+9vDhzPYpP1pxUEyIEvPIzOLvB8kEQc1tNjqZpWS85EVYMNi4e6/kX2Ko5/rDwYAhrA8N5t3VlGlVUu5CNstNHpZR7gWdOk1LKmcBMVWN4Gt+qxRnVoiI/7QimrocLveopuPCjQCHouQB+a22U0w1YnaOWptM0LYO2fwahB4zlJotXVRJxLDSCSRtO06Jqcd5qXlFJxuPy7ZXF77SqTOOKRflk7SlOXruvJqRkLaOG4vIu8PtKTYamaVnn9Fo4MBPqvQGe3ZVE3ItNYMSiAFyd7PihpxcWio4LPC7fTgSWFoLpvb0oYm/DW4sCuP8gUU2Q9wCo0x/2fAvnt6rJ0DRNvTvBsGYElPaBtpOVRCSnSN5ZGsidmARm9/PBuaC1kpwn5duJAKCIgy0z+3oTHvmQ95YHkpKi6PBE+2+hhCesGgoRIWoyNE1T51GZnKU19FBXJvfTjgvsPn+bzzrVwLOMs5KMtOTriQDAx92FjzpUY/uZW8zZfVFNiHUB46CSlMY/psQ4NTmappmflLDxXbh1GrrNg0JuSmJ2nb/N9H8v0NW7NH3qq8l4mnw/EQAMetGDjrVK8u3Wc+y/qGj5ycLloMscuB4IW8arydA0zfz850PQYmg
+Hiq2VBJxLfIh7yw5RhVXRya/4onI4rWN9UQACCH4plstyhW1Z/TiY9y4r+gv9qrtodE74P8HBKW/JkPTtCwWfgw2fwAVWkLTD5REJCSlMGJRAInJkp/7eVPAJuvPLtQTQSp7Wyvm9PfhQUIyI/8OIFFVOV2LT8CjCax/x+gt1zQtZ3pwL7VMrjh0/RUs1Py6nLzxNIFXI/m2Ry3KF8uefic9ETymkqsjX3f15GhIBN9sPqsmxNLKOP/YzhmWDoC4qOc/R9O0rJWSAquHGVcQ91wA9kWUxKwLCufPAyG83rgc7WqWVJJhCj0RPKGzV2kGNnRn3t7LbDpxXU2Ioyv0+AMirsDat4yDUZqm5Rx7v4cLW401h8v4KIm4cDOa8SuPU8/DhXEvqbkwzVR6IkjDRx2q4+VWiA9WHOfS7Rg1Ie4vQqvP4Mx6ODBLTYamael3aSf4TYaa3aHe60oiYuOTGL4ogII2lszs6421Zfb+KtYTQRpsrCyY1c8ba0vB8IUBPEhIUhP04iio2hG2TYSQA2oyNE0zXVQ4rBgCRSrBy9OVlcmNX3WCS6llcq5O6srkTKUngqcoXagA03vX4fytaD5afRIlXXiPyulc3FPL6W6ZP0PTNNMkJxr/HyY+hF7qyuT+3H+F9UHhvN+2Ci9WUFsmZyo9ETxD08rFeKdlZVYfu8aiQ6FqQuycjYNRcZFGrW2yoncfmqY927aJcPUQdP4JiqlZaSwgNILJm87QqlpxhjWtoCQjI/RE8ByjWlSkWeViTFp/muNhkWpCSnhCh+/hyh7js0lN07LWqdVw8Geo/6ax0IwCd2PiGbEogBLOdnzXI2vK5EylJ4LnsLAQ/NjLi2KOtgxfGEBEbIKaoDr9wHugcbbCuc1qMjRN+193LsDakVCmHrT5UknEozK5u7FZWyZnKj0RmMDF3oaf+3lzOzqeMcsUltO9NBVK1obVb8K9y2oyNE37PwmxxvU8VrapZXI2SmKm/3uBPRfuMKlTDWqWzroyOVPpicBEtd0K8cnL1dl57jYz/YLVhFjbGccLAJYP1OV0mqaSlLBhDNw+a5TJOZdWErPz3C1+2nGB7j5l6FUva8vkTKUngnTo/0JZutQpzQ/bz7PngqLlJ108oMtcuB5kdJxomqbG0d/h+FLw/RAqtFASERbxgHeWBlLF1ZEvOtfM8jI5U+mJIB2EEEzuUpNKxR0YvfgY4ZEP1QRVaQeN34WAPyHwbzUZmpafXQswWoArtoYm7yuJiE9KZsSiAJKTJXP6+2RLmZyp9ESQTgVtrJjd34fEZMlbiwJISFJUTuf7kVFOt2EM3DihJkPT8qMH92DZQHBwha5zlZXJfbnhDEFh95nWozYeRe2VZJiLnggyoEIxB6Z2r0Xg1Ui+2nRGTYilFXT/HewKGQ2IcYrWVda0/CQlxVgpMOYG9PwTChZWErM28Bp/HQxhaNPytKtZQkmGOemJIIPae5ZkSONyzN9/hXVB4WpCHIpDj/nG8pZrdDmdpmXanu8geJtRJldaTZnc+ZvRjF95gvoehfmgrZoL08xNTwSZMP6lqtR1d2H8yuNcuBmtJsS9IbSeBGc3wP6f1GRoWn5wcYdxwaZnT6g7RElETHwSwxb6Y29rxcy+dbDK5jI5U+WOUeZQ1pYWzOzrTUEbS4YvCiA2XlE9RMMRUK0TbP8MruxTk6Fpedn9MFj5OhSrCi//qKxMbtyK44TcfcDMvnUongPK5EylbCIQQrgJIfyEEGeEEKeEEG+n8RghhJghhAgWQhwXQnirGo8qJZztmNG7DpduxzB+1Ql15XSdZxmnlq4YDNE3zZ+haXmUSEktk0uKN8rkbNQcuP1j3xU2nrjO2LZVaFBezUI2qqh8R5AEvCelrAY0AEYIIao/8ZiXgEqpt6HAbIXjUebFikV5r00V1geF8+f+K2pC7JyMf8RxUbDiNURKspocTctjKlycD2FHoPNMKFpJSYZ/yD2+2nSG1tVdebNpeSUZKim
bCKSU16WUAalfRwNngCcv3esMLJCGg0AhIUT2rdeWCcObVaBl1eJM3nSGgNAINSGuNaDjDxCyl3KXF6rJ0LS85ORKylzbAC8MhxpdlETciYlnxKJjlHYpwLc9aufYi8aeRSj5KOPJECE8gN1ATSll1GM/3wBMkVLuTf3+X2CclPLoE88fivGOAVdXV58lS5ZkaBwxMTE4OKhbHDo2UfLZ/ockS/jsxQI42aj5B1H53M+Uur6VEzU/5G7RF5RkZCXVr0tWyiv7khf2o2BsGN4B7xFl58YJn6+RFuYvekuRkm+PxnEhIoWPG9jh7qT2orHMvC6+vr7+Usq6ad4ppVR6AxwAf6BrGvdtBBo/9v2/gM+ztufj4yMzys/PL8PPNdWJsEhZ6aNNst+vB2VScoqakISHMmpaHSm/cpPy7kU1GVkoK16XrJJX9iXX70dctJQz60v5TXm5b8sKZTHfbj0r3cdtkEsPhyrLeFxmXhfgqHzK71WlZw0JIayBlcAiKeWqNB4SBjzewlQGUHRSftaoWdqZSZ1qsDf4DtO3n1cTYm3HqRrjjIPIy141VlTSNM0gJWx4B+6ch+6/kWCr5sCt39lb/LQjmJ51y9Azh5bJmUrlWUMC+A04I6X8/ikPWwe8mnr2UAPgvpTyuqoxZZVe9dzo4VOGGTuC8TunZvnJuAKpl8ffOAGbxirJ0LRc6cg8OLHcKJMr31xJxNV7Rplc9ZJOTOpcU0lGVlL5jqARMABoIYQITL21F0IME0IMS33MJuASEAz8CrylcDxZRgjBF6/UpFpJJ8YsDeTqvQdqgiq3NQqzjv0FAX+pydC03CTMH7ZMgEptofF7SiLik5IZ8XcAKVIyu783dtY5t0zOVFaqNiyNA8DPPFqa+rnVCFVjyE521pbM7ufNyz/tZcTfASwf1hBbKwX/YHw/NE6N2/S+sahNyVrmz9C03ODBPWMdD6eS0GWOsjI5Y9na+8wd4IN7kZxdJmcqfWWxQh5F7fm2Z22Oh91n0vrTakIsLKHbb1CgMCwbAA8VrausaTlZSgqsegNibhorjSkqk1t9LIxFh0J5s1l52tTI+WVyptITgWJta5TgzWblWXQolFUBYWpCHIoZ5XT3w3Q5nZY/7Z4GwdvhpW+gtJqCgrM3opiw6gQvlCvM2Da5o0zOVHoiyAJj21ThhXKF+XD1Cc7eiHr+EzKi7AvQ+gs4txH2TVeToWk5UfB22Pk11OoNPoOVRETHJTJ8YQCOdtb8lIvK5EyVt/Ymh7KytOCnvnVwtLNm+MIAouMS1QQ1GA7VX4F/P4cre9VkaFpOEnkVVr4BxasZV90rKpP7YMVxQu89YFZfb4o75p4yOVOZNBEIIdyFEK1Svy4ghHBUO6y8p7ijHTP71CH03gM+WHFcXTldp5+gcHlYPhiib5g/Q9NyiqQEo0wuORF6/gU2BZXE/Lb3MptP3mBcuyrUL6fm2EN2e+5EIIR4A1gB/JL6ozLAGpWDyqteKF+Ece2qsPnkDX7be1lNiJ2T8T9FQgyseA2SFVVja1p2++cjuHY0tUyuopKIo1fuMWXzWdrWcOWNJrmvTM5UprwjGIFxTUAUgJTyAlBc5aDysjealKdtDVe+3nyWI1fuqQlxrQ4df4SQfcbHRJqW15xYAYfnQoMRUOMVJRF3YuIZ8XcAZVwKMC2XlsmZypSJIF5KmfDoGyGEFaBPS8kgIQTTetTGzaUAIxYFcDs6Xk1Q7V7GKkz7Z8CZDWoyNC073DoL60aDWwNoreYPneQUyejFx4h8kMjP/XxwsjN/YV1OYspEsEsI8SFQQAjRGlgOrFc7rLzNyc6a2f19iIpLZNTiAJKSU9QEtfsaSnnDmuFw96KaDE3LSvExRr+WTUHjlGlLNb+gv992jv0X7/LlKzWpXspJSUZOYspEMB64DZwA3sSohfhY5aDyg2olnfjyFU8OXrrHd9sUldNZ2ULPP42Lzpa9CgmKqi40LStICetHw90L0P134wp
iBf49c5NZfhfpXc+NHnVzd5mcqZ47EUgpU6SUv0ope0gpu6d+rT8aMoPuPmXoU78ss3deZNtpRctPFioLXX+Fm6eMGgr90mm51eFf4eRKaPExlGuqJOLqvQeMWRpIjVJOfNaphpKMnMiUs4YuCyEuPXnLisHlB5++XJ2apZ14d1kgoXcV/cVeqTU0HQuBiyBggZoMTVPp6hHY+iFUbgeNxiiJiEtMZvgifwBm9/PJE2VypjLlo6G6QL3UWxNgBqDXSTQTo5zOBwshGLbQn7hERWsRNx8P5X2NyurwQDUZmqZC7F3jegGnUkrL5D5ff5qT16L4vqcXZYuouSYhpzLlo6G7j92uSSl/BFpkwdjyDbfCBfmhV21OX4/i07Wn1IRYWEK3eWBf1Dhe8FDRusqaZk4pybDqdYi9DT0XQAEXJTEr/MNYfDiU4c0r0Kq6q5KMnMyUj4a8H7vVTV1LQF9ZbGYtqroy0rciS49eZdmRq2pC7IsaZ1pEXYPVw43GRk3LyXZ9Axd3QPupUMpLScSZ61F8tPoEDcsX4b3WlZVk5HSmrEfw3WNfJwFXgJ5KRpPPjWldmWNXI/hk7UlqlHaiRiln84e41Yc2k2HLONj3IzR51/wZmmYOF7bDrqlQuy94D1QSERWXyPCF/jgXsGZGn7xXJmcqUz4a8n3s1lpK+YaU8lxWDC6/sbQQTO9dB5eCNgxfGMD9h4rK6V54E2p0gR1fwOXdajI0LTMiQ42PhFxrQIfv1JXJLT/O1YiHzOrnTTFHW7Nn5BZPfUcghHjmn4rPWIdYy4SiDrbM6leHXr8c5L1lQcwd4IOFhZn/J3hUTnfzlNFH9OYeZedka1q6JcXDsoHG8YGeC5SVyc3bc5ktp27wcYdq1PPIm2VypnrWOwLH59w0RXzcC/Nh+2psP3OTX3YrOlPX1tH4nywhFlYMNhocNS0n2PohhAdA51lQpIKSiMOX7zFly1leqlmCIY3LKcnITZ76jkBKqdvKstHgRh74h0YwbetZvNwK0bBCEfOHFK8GL88w3oJv/wzaTjZ/hqalx/HlcGQevDgKqndSEnErOo6RfwdQtnBBpnavlafL5ExlyllDdkKIEUKIn4UQvz+6ZcXg8jMhBN90q4VHUXtGLT7Grag4NUG1ekC9N+DATDi9Tk2Gppni1hmjQqLsi9DyMyURSckpjF58jKi4RGb398Yxj5fJmcqUQ+R/ASWAtsAujPUIolUOSjM42Foxp78PsfFJjPg7gERV5XRtJ0NpH2O94zvBajI07Vnio2HpALBxgB5/gKUpJzSm33fbznPw0j0mv+JJ1RJ5v0zOVKZMBBWllJ8AsVLKP4EOgKfaYWmPVHZ1ZEo3T45ciWDqlrNqQqxsocefRpOjLqfTspqUsG4U3LtolMk5llASs+30TWbvvEif+mXp5lNGSUZuZcpE8OgoYqQQoibgDHgoG5H2Pzp7lebVhu78uucyW05eVxNSyA26/Qq3TsPGd3U5nZZ1Dv0Cp1ZDy4lQromSiNC7D3h3WSA1Szvx6cvVlWTkZqZMBHOFEC7AJ8A64DTwzfOelHos4ZYQ4uRT7m8uhLgvhAhMvU1M18jzmY86VKO2WyHeX36cS7dj1IRUbAXNxkHQYvCfryZD0x539bCx5GSV9tDoHSURCcmS4Yv8sRAi35XJmcqUieAPKWWElHKXlLK8lLK4lPKX5z+N+UC75zxmj5TSK/U2yYRt5lu2Vpb83M8ba0vBW4sCiE9W9Bd7sw+gQgvY/AGEH1OToWkAsXeM6wWcy8Ars5VcNAaw8EwCp8Kj+KFXbdwK568yOVOZMhFcFkLMFUK0FOk4z0pKuRtQtChv/lS6UAF+7F2HczejWXAqASXLQlhYQtd5YF/cOF7wQL+EmgIpycbFjA/uppbJFVISs+zoVXaHJTHStyItqua/MjlTief9MhFCFABeBnoDPhjLVC6RUu597saF8AA2SClrpnFfc2AlEAaEA+9LKdOs3hRCDAW
GAri6uvosWbLkedFpiomJwcHBIUPPzUnWBCewJjiRQTVsaO6m5vQ3x6hz1Dn2IREuXpzw/AiEug6WvPK6QN7ZF9X74XF5ER4hyzhbZSQ3SrZWkhESlcyXB+Mo7ygZ18AeizxwvUBmXhdfX19/KWXdNO+UUpp8A1yABUCyiY/3AE4+5T4nwCH16/bABVO26ePjIzPKz88vw8/NSZKTU2SHaZtlpQ83yaCrEeqCDv4i5adOUu6api5D5p3XRcq8sy9K9+PcVuPf1eq3lEVEPkiQTafukC9M3i7XbtmhLCerZeZ1AY7Kp/xeNenPPCFEMyHEz0AAYIcZ2kellFFSypjUrzcB1kKIopndbn5gYSF4s5YtRR2McrrIBwlqguq/ATW7gd9kuLRTTYaWv0SEwKo3wNUTOnyrJEJKydjlQVyLeMisfnVwss397wRUM2mpSuAdYA9QU0rZU0q5MrPBQogSj445CCHqp47lbma3m1842gh+7u/Dreg4xiwNJCVFwfECIYwKiiKVYMUQiAo3f4aWfyTFw/KBIFOg559gXUBJzNzdl/jn9E0mtK+Gj3v+LpMzlSnvCGpLKbtIKRdLKWNN3bAQYjFwAKgihAgTQgwRQgxLXdgGoDtwUggRhLH8Ze/Uty+aibzcCjGxY3X8zt3m552Krgi2dYBef0HiQ2O5QF1Op2XUlvHGmWivzFZWJnfo0l2mbj1HB8+SvNbIQ0lGXvTc67illFEZ2bCUss9z7p8JzMzItrX/07+BO0dDIvhu23m83FxoXEnBp2vFqkCnGbByCGybCO2+Nn+GlrcFLYWjv0Ojt6FaRyURt6LiGLn4GO6FCzKlm6cuk0uH/LkcTx4ihODrrp5UKu7A6CXHuH7/oZogz+5Q/004+LNxFaimmermaVj/Nrg3hhZqrhtNSk5h5OJjxMQlMbu/jy6TSyc9EeQBBW2smN3fh/jEZEYsCiAhSVE5XZsvoUw9WDsS7lxQk6HlLXFRsGwA2DkZPUKKyuSm/XOOw5fv8VXXmlQpoZdLSS9TDha/LYRwEobfhBABQog2WTE4zXQVijkwtXttAkIj+WrTGTUhVjbQY75RUrd0gLGojaY9jZSwbiTcuwzd/wBHNVb2zZcAACAASURBVBd0/XPqBr/sukS/F8rSpY4uk8sIU94RvJZ6nKANUAwYDExROiotQzrUKsngRh7M33+F9UGKzvBxLgPd5sHts7BhjC6n057u4Gw4vRZafQoejZREhNyN5b3lQdQq48xEXSaXYaZMBI+OuLTH6B0KeuxnWg4z4aVq+Li7MH7lcYJvKVo2okILaD4BjqceANS0J4UehG2fQNWO8OJoJRFxickMWxiAhRDM6uuNrZUuk8soUyYCfyHEPxgTwVYhhCOg6ENoLbNsrCyY1dcbO2tLhi0MIDY+SU1Q07FGW+mW8XAtQE2GljvF3DZONXZ2M9YdVnT2zidrTnLmehQ/9vLSZXKZZMpEMAQYD9STUj4ArDE+HtJyqBLOdszoU4dLt2OYsOqEonI6C+j6Kzi4Gg2SupxOA6NMbuVr8DDCuP5EUZnc0iOhLPcPY3SLivhWLa4kIz8xZSJoCJyTUkYKIfoDHwP31Q5Ly6xGFYvyXpsqrAsK56+DIWpCChY2VjaLvg6rhkKKfqOY7/lNhsu7ocN3UELNQoYnr93nk7WnaFKpKG+3qqwkI78xZSKYDTwQQtQGPgBCMIrntBxueLMKtKxanC82nOZYaISakDI+xgVmwdtgz3dqMrTc4dwW499AnQFQp7+SiPsPE3lrUQBF7G34sZcXlhb6cKU5mDIRJKVWP3QGpksppwP6RN1cwMJC8H1PL1yd7BixKIB7sYrK6eq9Dp49jL8GL/qpydBytogrsHqo8S6g/TQlESkpkveWBREe+ZCZfb0p4mCrJCc/MmUiiBZCTAAGABuFEJYYxwm0XMC5oDWz+/lwJzaBt5ccI1lZOd10o4pi5RC4f838GVrOlRhnLGIkgZ5/KSu
T+2X3JbafuclHHYwz4zTzMWUi6AXEY1xPcAMoDaiZ8jUlPMs483mnGuy5cIcZ/yq6ItjG3vgl8KhhMknRuw8t59kyDq4HQZc5ULickogDF+8ybetZOtQqyaAXPZRk5GfPnQhSf/kvApyFEB2BOCmlPkaQy/Su50Y37zLM2HGBneduqQkpVhk6/QRhR4xzyLW8L3Ax+M+HxmOganslEbei4hi1+BjlitrzTbdaukxOAVMqJnoCh4EeGAvSHBJCdFc9MM28hBB8+UpNqrg68s7SQMIiHqgJqtkVXhgOh+bAyUwvW6HlZDdPGVeXezQB34+VRCQmpzDy72PExhtlcg62arqK8jtTPhr6COMagoFSyleB+oD+cy8XKmBjyZz+PiQnS0YsCiA+KVlNUOtJUKY+rBsNt8+rydCyV9x9o2/KzlltmdzWcxy+co8p3Typ7KrPUVHFlInAQkr5+GcJd018npYDeRS1Z1qP2gSF3eeLDafVhDxeTrdsAMTHqMnRsoeUsHaEcaZQjz/AQc0FXVtO3mDu7ksMaOBOZ6/SSjI0gym/0LcIIbYKIQYJIQYBG4FNaoelqdSuZgmGNi3PwoOhrDmm6Awf59LQ7Te4fQ42vKPL6fKSA7PgzHpo/Tm4v6gk4vKdWMYuD6K2WyE+7lhNSYb2f0w5WDwWmAvUAmoDc6WU41QPTFPrg7ZVqF+uMBNWneD8TVXldL7g+xGcWA5H5qnJ0LJWyH5jlbpqL0PDkUoiHiYkM3yhP5aWgll96+gyuSxg0kc8UsqVUsp3pZRjpJR6eao8wMrSgpl96mBva8Wwv/yJjlO0FnGT96BSG9gyAcL81WRoWSP6JiwfDC7uysrkpJR8vOYk525G82MvL8q46DK5rPDUiUAIES2EiErjFi2EyNA6xlrOUtzJjpl96xBy7wHjVh5XV07X5RdwLGlcX6DL6XKn5CTjYsG4+8b1InbOSmKWHLnKyoAwRreoRPMqukwuqzx1IpBSOkopndK4OUopnbJykJo6DcoX4YO2Vdh04ga/77uiJqRgYej5J8TchFVv6HK63MjvS7iyBzp+DyVqKok4ee0+n64zyuRGt6ykJENLmz77R2No0/K0qe7K15vOcPSKor/YS3tDuykQvB126wvTc5Wzm2DvD+A9ELz6Kom4/yCRYQv9KWpvw/TedXSZXBbTE4GGEIJpPWpT2qUAI/4O4E5MvJqguq9BrV6w82tjQtByvnuXYfUwKFkbXpqqJCIlRfLuskBuRsUxq583he1tlORoT6cnAg0A5wJGOV3kg0RGL1ZYTtfxByhWFVa+AZFXzZ+hmc+jMjkB9FwA1nZKYmbvusi/Z2/xcYfq1Cmry+Syg7KJQAjxuxDilhDi5FPuF0KIGUKIYCHEcSGEt6qxaKapXsqJL1+pyf6Ld/l+2zk1ITb2xspVyYnGcoa6nC7n2jwWbhyHLnPBxUNJxP6Ld/jun3O8XLsUrzZ0V5KhPZ/KdwTzgXbPuP8loFLqbSjGAjhaNutR143e9dyY5XeR7advqgkpWgk6z4RrR+Gfj9RkaJlS4vq/ELDAOP23yrP+N864G/fjGL34GOWLOTClq6cuk8tGyiYCKeVu4FlHHjsDC6ThIFBICFFS1Xg0033WqQY1Szvx7rJAQu8qKqer8Qo0GAGH51L85m41GVrG3DhBpQtzoFxT44JABYwyuQAeJCQzp7839rpMLlsJJeeOP9q4EB7ABinl/5xvJoTYAEyRUu5N/f5fYJyU8mgajx2K8a4BV1dXnyVLlmRoPDExMTg4OGTouTmN6n25/SCFT/c/pFhBCz56wQ4bS/P/tSZSkvAK/Bj7mEsE+HzLA/uyZs/Iarn935hlUiw+/u9hkRSHf70fSbRRs/j84rPxbL2SxLDatjQoqXYSyO2vyeMysy++vr7+Usq6ad4ppVR2AzyAk0+5byPQ+LHv/wV8nrdNHx8fmVF+fn4Zfm5OkxX7sv30Dek+boMctyJIXcj9azL+Szc
pf6onZVy0upwskqv/jaWkSLm4r5SfF5b+a35WFrPxeLh0H7dBTlxzQlnG43L1a/KEzOwLcFQ+5fdqdp41FAa4PfZ9GSA8m8aipaFlNVfeal6BJUeusvyoojN8nEpxuvr7cPcCrB+ty+my0/6f4OwGaD2JKGc1RW+XbsfwwYrjeLkV4qMO1ZVkaOmXnRPBOuDV1LOHGgD3pZTXs3E8WhrebV2ZFysU4eM1JzkdrqZZJNKllvFZ9MmVcPhXJRnac1zZB9s/g+qdocFbSiIeJCQxfGEA1paCWf28sbHSZ6/nFCpPH10MHACqCCHChBBDhBDDhBDDUh+yCbgEBAO/Amr+9WmZYmVpwYw+dShU0Jrhi/y5/1BROV3jd6FyO9j6IVw9oiZDS1v0DVgx2FhvuNNMdWVyq09y/lY003vXoXQhNQvcaxmj8qyhPlLKklJKayllGSnlb1LKOVLKOan3SynlCCllBSmlp0zjILGWMxR1sGVWX2+uRTxk7PIgheV0c8AptZwu9o75M7T/lZwEK16DuCjjojE7NTVifx8OZdWxa7zTsjJNKxdTkqFlnH5vppmkrkdhJrSvxj+nbzJ39yU1IQVcjF9Gsbdh5euQomgpTe3/7JgEIfvg5R/BtYaSiONhkXy+7jTNKhdjVIuKSjK0zNETgWay1xp50MGzJFO3nuPgpbtqQkrVMTptLvnBrm/UZGiGsxth33TwGQy1eyuJiHyQwPCFARRztOXHXl5Y6DK5HElPBJrJhBBM6eaJe+GCjPz7GLei4tQE+QyC2n1g11S4oMvplLh3CVYPh5JeRiusAikpkjFLA7kVbZTJuegyuRxLTwRaujjaWTO7vw+x8UmMXHyMpGQFawsIAR2+h+LVYdXrEBlq/oz8LPEhLH3V+O+ssEzu553B+J27zcSO1fFyU3NhmmYeeiLQ0q1KCUe+7urJ4cv3mLZVVTldQaOcLiUZlg2EJEXV2PnRpvfh5gno+qux7KQC+4Lv8P2283T2KkX/BrpMLqfTE4GWIa/UKU3/BmX5Zfcltpy8oSakSAVjbdzwAOO0Ui3zAv6CYwuh6Vio3EZJxKMyuQrFHPhal8nlCnoi0DLsk47VqV3GmbHLg7h8J1ZNSPVO0HAkHJkHx5erycgvrh833g2Ubw7NJyiJSExOYcTfAcQlJjO7vw8FbXSZXG6gJwItw2ytLJnVzxtLS8Hwhf48TFB0umerz6BsQ6OC4tYZNRl53cNIWDYAChSGbr+BhaWSmK83ncU/JIJvuteiYvG8UfSWH+iJQMuUMi4F+bGXF+duRvPxmpNqLjaztIbuf4CNAywdAPHR5s/Iy1JSYM1wuB8GPf8E+6JKYjYcD+f3fZcZ9KIHHWuVUpKhqaEnAi3TmlcpzqgWlVgZEMaSI6rK6UpC99/h3kVYN0qX06XH/ulwbhO0+RLc6iuJCL4Vw7gVx/EuW4gP26sprNPU0ROBZhZvt6xEk0pF+XTdKU5eu68mpFwTaPEJnFoNh35Rk5HXXN4D/06CGl3ghWHPf3wGPEhI4q1F/thaW+oyuVxKv2KaWVhaCKb3rkNRexuGLfQn8oGitYgbvQOVXzKWuLx6WE1GXhF9w+gRKlwBOv2krEzuw1UnuHArhum9vSjprMvkciM9EWhmU9jehln9vLkZFce7y4JISVFVTjcbnEob1xfE3DZ/Rl6QnAjLB0NCjHE9hq2jkpiFh0JZExjOu60q06SSLpPLrfREoJlVnbIufNKxOjvO3mL2rotqQgq4GL/cHtyFlUN0OV1a/v0cQvfDy9OhuJrP7IOuRvLF+tP4VinGCF9dJpeb6YlAM7sBDdzpVLsU3/1zjn3BiuqkS9aG9tPg8i7Y+bWajNzqzHpjtbG6Q6BWTyUREbEJvLXIKJP7QZfJ5Xp6ItDMTgjB1109KV/MgdGLj3HjvqJyOu9Xwasf7J4G5/9Rk5Hb3L0Ia96CUt7QTs0EmZIiGbMskNvR8czu702hgrpMLrfTE4GmhL2
tFXP6+xCXmMyIvwNIVFVO1/5bcK0Jq96AiBDzZ+QmCQ9g2avGxWI9/wQrWyUxM/2C2XnuNhNfrk6tMrpMLi/QE4GmTMXiDnzTvRb+IRF8vemsmhCbgkaDpkwxVjbLr+V0UqaWyZ2CrvOgUFklMXsu3OaH7efpUqc0/V5Qk6FlPT0RaEp1rFWKQS968Pu+y2w8fl1NSJEK8MpsCD8GW8arycjpAhZA4CJo9gFUaqUkIjzyIW8vCaRScQcmd6mpy+TyED0RaMp92L4a3mUL8cGKIIJvxagJqdYRXhwNR3+HoKVqMnKq8EDYNBYqtIBm45REJCQZZXIJSSm6TC4P0hOBppyNlQWz+nlja23JW4v8eZCQpCao5afg3gjWvw03T6vJyGkeRhjHBeyLGh8JKSqT+2rTGY6FRjK1ey0qFNNlcnmNngi0LFHSuQAzetfhwq0YJqw6oaiczsroI7J1NJo246LMn5GTpKTA6mEQFQ49/gT7Ikpi1gWFM3//FV5rVI72niWVZGjZS08EWpZpXKko77aqzNrAcBYeVHSGj2MJ6PEH3LsM60bm7XK6fT/A+S3QdjK41VMSEXwrmvErj+Pj7sKE9lWVZGjZT08EWpYa4VsR3yrFmLThNIFXI9WEeDSGlhPh9Fo4+LOajOx2eTfs+BJqdoP6Q5VExMYnMWxhAAWsLZnV1xtrS/3rIq9S+soKIdoJIc4JIYKFEP9zOocQYpAQ4rYQIjD19rrK8WjZz8JC8EMvL1yd7BixKIB7sarK6d6GKh1g20QIPagmI7tEhRtlckUqwsszlJXJTVh1gku3Y5jRpw4lnNUscK/lDMomAiGEJTALeAmoDvQRQlRP46FLpZReqbd5qsaj5RyFCtrwcz9vbkfH887SQFJUfHwjBLzyMzi7wfJBeaec7r8yuQfQ8y+wVXPg9t/QJNYFhfNemyo0qqhmIRst51D5jqA+ECylvCSlTACWAJ0V5mm5SK0yhfisUw12n7/NuouJakIKFDIuNnsYAStfyxvldNs/g6sHodMMKK7mM/tjoREsPptAy6rFGd6sgpIMLWcRSs7eAIQQ3YF2UsrXU78fALwgpRz52GMGAV8Dt4HzwBgp5f8scSWEGAoMBXB1dfVZsmRJhsYUExODg0PeOPUtL+yLlJJ5JxLYH57Iuz52eBZTc256ievbqXruJ0LKdudy+QFKMh5R+boUvb2fmqe+4Vqp9lyo/KaSjKh4yWcHHiJkCpMa22NvnfsvGssL/688kpl98fX19ZdS1k3zTimlkhvQA5j32PcDgJ+eeEwRwDb162HAjudt18fHR2aUn59fhp+b0+SVfXkQnyQbfbFRen2+VYZFPFAXtOYtKT91kvLsZnUZUuHrcvuClJNLSznXV8rEOCURCUnJssfs/bLKx5vk/LXblWRkh7zy/4qUmdsX4Kh8yu9VlR8NhQFuj31fBgh/YhK6K6V8VA7zK+CjcDxaDlTAxpKRdexISpa8tSiA+CRFH9+0/xZKeMLqoRBxRU2GKo/K5CytjesFFJXJTVp/msNX7vFNt1q4O6m5ME3LmVROBEeASkKIckIIG6A3sO7xBwghHr86pRNwRuF4tByqhL0F03rUIuhqJJM3KvonYF3AOLgqMX6pJiqqxjY3KWHju3DrNHSbB4Xcnv+cDFhyOJS/DobwZtPydPYqrSRDy7mUTQRSyiRgJLAV4xf8MinlKSHEJCFEp9SHjRZCnBJCBAGjgUGqxqPlbO1qlmRo0/IsOBDCokOKLjYrXA66zIHrQbBFTSeP2fnPh6DF0Hw8VGypJiIkgolrT9GkUlE+aKcvGsuPlDZHSSk3AZue+NnEx76eAEzIbE5iYiJhYWHExT37rzxnZ2fOnMkbbzry2r5cvnyZMS3Kc+FmNBPXnqKMS0GaVVawBm7V9tDoHdj3I7g1AK8+5s8wl2sBsPkDqNASmn6gJOLKnVjeWHCUkoXs+KlPHSz1SmP5Up6oEAwLC8PR0REPD49nVuN
GR0fj6KhmEe+slpf2JSoqioSEBG5cD+envt70mHOAEYsCWDG8IVVLOJk/sMUncM0fNowxjhuUqGn+jMx6cA+WDQT74tD1V7Aw/5v3e7EJDJ5/BCkl8wfX1yuN5WN54prxuLg4ihQpovvRcykhBEWKFCEuLg4HWyt+H1QXe1tLXvvjCLeiFHyWb2kF3X4DO+fUcrr75s/IjJQUWP0mRF83roNQUCYXl5jM0AVHuRb5kHkD61KuqL3ZM7TcI09MBICeBHK5x1+/ks4F+G1gPSIfJjLojyNExSm44MzR1SiniwiBtSNyVjnd3u/gwj/GmsNlzH8iXUqK5L3lQRwNieCHnl74uBc2e4aWu+SZiUDLW2qWdmZ2fx8u3IpmyPwjPExQcFqp+4vQ6jM4sx4OzDT/9jPi0k7w+wpqdod65q/eklLyydqTbDx+nQ/bV6VDLV0rremJwGwmT55MjRo1qFWrFl5eXhw6dEhZ1pUrV1i2bNl/38+fP5+RI0c+4xmG5s2bc/To0f/vZ3fv3sXX1xcHB4dnbiOt56rWrHIxfuxVB/+QCIYt9CchKcX8IS+OgqodYdunELLf/NtPj6hwWDEEilSCl6ebvUxOSsmUzWdZdCiUYc0q8EaT8mbdvpZ76YnADA4cOMCGDRsICAjg+PHjbN++HTc3Ned7gzERLF++3CzbsrOz44svvuDbb781y/bMrUOtknzVxZNd528zZlkgySlm/gjnUTmdi7tR5hZ907zbN1VyolGOl/gQeqkpk5u5I5hfdl/i1YbujGtXRX+cqv0nT5w19LjP15/idHjaK1MlJydjaZn+Kyarl3Li05drPPX+69evU7RoUWxtjSs+ixb9v7ZGDw8P+vbti5+fH4mJicydO5cJEyYQHBzM2LFjGTZsGFJKPvjgAzZv3owQgo8//phevXo99efjx4/nzJkzeHl5MXDgQFxcXAgPD6ddu3ZcvHiRLl26MHXqVJP2zd7ensaNGxMcHJzu/y5xcXEMHz6co0ePYmVlxffff4+vry+nTp1i8ODBJCQkkJKSwsqVKylVqhQ9e/YkLCyM5ORkPvnkE3r16mVSTu/6ZYmKS+SrTWexsbRgWvdaWJmzG9/O2TgoO68VrBwCA9YYB5Sz0raJcPWQscJasSpm3/y8PZf4btt5unmX4bOXa+hJQPv/5LmJIDu0adOGSZMmUblyZVq1akWvXr1o1qzZf/e7ublx4MABxowZw6BBg9i3bx9xcXHUqFGDYcOGsWrVKgIDAwkKCuLOnTvUq1ePpk2bsn///jR/PmXKFKZMmcKWLVsA46OhwMBAjh07hq2tLVWqVGHUqFFK35UAzJo1C4ATJ05w9uxZ2rRpw/nz55kzZw5vv/02/fr1IyEhgeTkZDZt2kSpUqXYuHEjAPfvp+9MnaFNK5CQlMK3/5wnKUXyfc/a5l0opYQndPge1r4Ffl8axw6yyqnVxgI69d80Fpoxs9k7L/LNlrO09yzBN908sdDXCmhPyHMTwbP+cld17r2DgwP+/v7s2bMHPz8/evXqxZQpUxg0aBAAnToZF1J7enoSExODo6Mjjo6O2NnZERkZyd69e+nTpw+Wlpa4urrSrFkzjhw58tSfOzn977n1LVu2xNnZGYDq1asTEhKifCLYu3cvo0aNAqBq1aq4u7tz/vx5GjZsyOTJkwkLC6Nr165UqlQJT09P3n//fcaNG0fHjh1p0qRJuvNGtqiElaUFUzafJTEphRl96mBjZcbJoE4/o+J57w9Qpr5x8Zlqdy7A2pFQph60+dKsm5ZS8sP2C8z49wKdapfiu561zftOSssz9L8KM7G0tKR58+Z8/vnnzJw5k5UrV/5336OPjCwsLP77+tH3SUlJT13I/Wk/T8vj27W0tCQpKSm9u5BuTxtf3759WbduHQUKFKBt27bs2LGDypUr4+/vj6enJxMmTGDSpEkZyhzWrAKfdKzOllM3eGuRP3GJZj6b6KVpULK2sSj8vcvm3faTEmJ
h6QCjRK7HfLAy3wVdUkq+3nyWGf9eoGfdMvzQy0svNak9lf6XYQbnzp3jwoUL/30fGBiIu7u7yc9v2rQpS5cuJTk5mdu3b7N7927q16//1J87OjoSExOjYlfSpWnTpixatAiA8+fPExoaSpUqVbh06RLly5dn9OjRdOrUiePHjxMeHk7BggXp378/77//PgEBARnOHdK4HF90rsG/Z2/Rb94hIsy53KW1nXG8QKC2nE5K48rm22eNMjnnMmbbdGJyCuNXnmBu6oHhKV1r6eoI7Zny3EdD2SEmJoZRo0YRGRmJlZUVFStWZO7cuSY/v0uXLhw4cIDatWsjhGDq1KmUKFHiqT8vUqQIVlZW1K5dm0GDBuHi4mJyVocOHbC2tgagYcOGLF++HA8Pj/9qHtasWcM///xD9er/u6rok8/966+/GDZsGJ6enlhZWTF//nxsbW1ZunQpCxcuxNramhIlSjBx4kSOHDnC2LFjsbCwwNramtmzZ5s85rQMaOhBYXtbxiwLpNvs/fz5Wn3cChfM1Db/4+IBXebC4l6weSx0+sk8233c0d/h+FLw/QgqtDDbZqPjEnlrUQB7LtxhpG9F3mtTWR8Y1p7vaQsV5NRbWgvTnD592qSFGaKiokx6XG6QF/fF1NfxcYcv35W1Ptsqfb74RwaGRph3YNs+MxazCfjL5KeYtHBI2FEpJxWV8q9uUiYnZ3x8T7gW8UC2/WGXrDBho1x6ODRT29KLueRMuXFhGk1Trp5HYVYOfxE7a0t6/nKAFf5h5tu470fg0QQ2vgfXj5tnm4/K5Bxcoetcs5XJ+YdE0OXnfVyLeMj8wfXpWU/tiQJa3qInAi3Xq1jcgbUjGuHj7sL7y4OYuPakea5CtrQyzuu3K2QcL3gYmbntpaTAqqEQcxN6/gkFM9/xI6VkwYEr9J57AFsrS1YMf5HGlYo+93ma9jg9EWh5QhEHWxa8Vv+/xW36/HqQ6/cfZn7DDsWNM3oiQzNfTrfnWwjeZpTJlc58mVxsfBLvLgtKXVSmGOtHNqZKibxRTa5lLT0RaHmGlaUFH7avxsy+dThzPYq2P+xmw/Hw5z/xedwbQutJcHYD7J+RsW1c3GGUyXn2hLpDMj2kgNAI2s/Yw5rAa4xpVZl5r9bFuaB1prer5U96ItDynI61SrFpdBPKF3Ng5N/HGLM0kPsPM1ll3XAEVOsE2z+HK/vS99z7YbDydShWFV7+MVNlconJKfy4/Tw95hwgKVmy5I0GvN2qkr5aWMsUPRFoeZJHUXtWDGvIO60qsS4onJbf7WJ9UHi6LtL7/wgBnWcZp5auGAzRN0x7XlKCUSaXFG+UydlkfAEY/5AIOs7Yy4/bL9C5dik2v9OEF8qbf9EaLf/RE4GZODj8b1vk7t278fb2xsrKihUrVqTruVrmWVla8E6ryqwd0YiSznaMWnyMQX8c4fKd2Ixt0M7J+GUeFwUrXoNkE67e3vYJhB2BzjOhaKUMxd6LTeDD1SfoNns/0XGJ/PpqXb7v5YWTnf4oSDMPPREoVLZsWebPn0/fvn2zeyj5Ws3SzqwZ0YhPX67O0Sv3aP39Lj5de5K7MfHp35hrDej4A4Tsgx3Pqck4uRIOzYEXhkONLumOiktM5uedwTSb6seSw6EMaVyObe82o3V11/SPW9OeIe9dWbx5PNw4keZdBZKTMlYvXMITXpqS7qd5eHgARqeQKeRTaqevX79Or169iIqKIikpidmzZ+Pp6cmgQYM4evQoQghee+01xowZk+4x5heWFoLBjcrRoVZJpm+/wMJDoawMuMbgRh4MetGDIg62z9/II159jHK6fdPB7QWo2uF/H3P7PKwbbZTXtU5fr9LDhGSW+19l9s6LXL8fR6tqxRnXriqVXPUZQZoaeW8iyMWeVkf9999/07ZtWz766COSk5N58OABx44d49q1a5w8eRKAyMhMnuOeTxR3tGNyF09ea1yOb7eeY6ZfML/uuUSvum4MblQOD1MXcW/
3DYQHwurh8OZOKPzYal/xMbBsAFjZpatM7m5MPIsPh/LHvivcjU3Ax92FkX5PMwAACb5JREFUH3p50UAfB9AUy3sTwTP+cn+oqIbaXJ5WO12vXj1ee+01EhMTeeWVV/Dy8sLDw4NLly4xatQoOnToQJs2bbJ7+LlKhWIOzO7vQ/CtGObuvsjfh0P580AIDcoXpne9srStUYICNs9YxOhROd0vTWHpq/D6NrAuYFxnsP5tuHMeBqwG59LPHEdScgr7Lt5l6ZFQtp2+SWKypEXV4gxvXoF6HnpReS1rKD1GIIRoJ4Q4J4QIFkKMT+N+WyHE0tT7DwkhPFSOJ6d72hktTZs2Zffu3ZQuXZoBAwawYMECXFxcCAoKonnz5syaNYvXXzf/Quf5QcXiDkztXpu941owtm0VwiPjeGdpIN5fbOPNv46y0j+M29FPOZbg4m7URNw8ARvfB6BU+CY4uQJ8P4TyzdN8WnRcIttO3+T95UHUm7ydgb8f5sDFuwxs6MG2MU35fVA9PQloWUrZOwIhhCUwC2gNhAFHhBDrpJSnH3vYECBCSllRCNEb+AYwbf3CPKhp06b88ssvDBw4kHv37rF7926mTZtGSEgIpUuX5o033iA2NpaAgACaNm1K4cKF6datGxUqVPhvERwtY1yd7BjhW5HhzSpw8PJdtpy8wT+nbrL1lLGGcbmi9tR1d6FWGWcqFHOgQnEHijvaIiq3hSbvG1cNJ8dTMXg1VGoLjd8DIPJBAhdvx3DxViynr0dx5Mo9zlyPIkWCo50VLasWp22NErSoVhxbq/Qvo6pp5qDyo6H6QLCU8hLA/2vvfmOkuqswjn+fxaFDWQRxNwS6aDcFjf9LXErSFAO1KjEKQkpSY7SmMQ0mxDakSY3EgmiDSKIm+kYiGLCtBIomEGtsG7tRX6C0BKS4ra6NDVsaCgto13VLgeOLe5Hd6cLuzs5wZ+Y+n2STuTO/uXNOZriH++93JO0ElgGDC8EyYH36+HHgx5IUZV/snZ3+/n7a2i7PKb9mzRoWLlzI8uXLOXPmDPv27WPdunUcPXr0iuu40rTT27dvZ/PmzRQKBZqbm9mxYwfHjx9nxYoVXLyYzKmzcePGqueYB01N4tabWrj1phbWf/YDHHnlX+x/qZcD/zzDU10n2D1oUrvCBPGO6yfScv1tbCw8zUeO7OaEWrn/5Jc5tukZTvefY+DNy3MeTSpMYN67prH69rksaJ/O/BunV7bDmlmZVK1trqQ7gSUR8ZV0+YvAgohYPWjM8+mYnnT5H+mYUyXruhe4F2DGjBkf3blz55DPmjp1KnPmzBkxpnKb19eiRsylu7t7zL2Mr6WI4Owbwav/CY73XeT0QND3ZvD6uaDp/H+Zf+Egh3kP/RNbaS6IKRNh6nVNzJwsZjU30TJJNNVJb4C+vr6Gub/FuSQWL178XER0DPdaNfcIhvvFl1ad0YwhIrYAWwA6Ojpi0aJFQ17v6uoa1UngavUszkIj5lIsFpk3b17W4YzDcjo7Oyn9fdajRskDnMtoVHO/tAcYPCl6G1A6A9j/x0h6GzAVOF3FmMzMrEQ1C8EBYK6kdkkTgbuAvSVj9gJ3p4/vBH5X7vmBOjytYIP4+zPLTtUKQUScB1YDvwW6gF0RcVTSBklL02FbgXdK6gbWAG+5xHQ0isUivb293pjUqYigt7eXYrGYdShmuVTVG8oi4gngiZLnHhr0eABYOd7PaWtro6enh5MnT1513MDAQMNsbBotl2nTpg256srMrp2GuLO4UCjQ3t4+4rjOzs46Pxl5mXMxs0rxRcxmZjnnQmBmlnMuBGZmOVe1O4urRdJJ4OUy394CnBpxVH1wLrWpUXJplDzAuVzy7ohoHe6FuisE4yHp2SvdYl1vnEttapRcGiUPcC6j4UNDZmY550JgZpZzeSsEW7IOoIKcS21qlFwaJQ9wLiPK1TkCMzN7q7ztEZiZWQkXAjOznMtdIZD0bUl/kXRI0pOSZmUdU7k
kbZb0QprPryRNyzqmcklaKemopIuS6u5SP0lLJL0oqVtSWbPo1gJJ2yS9lnYPrGuSZkt6RlJX+tu6L+uYyiGpKOnPkg6neXyr4p+Rt3MEkt4eEf9OH38NeH9ErMo4rLJI+iRJD4fzkjYBRMSDGYdVFknvAy4CPwEeiIhnMw5p1CRNAP4GfIKk2dIB4PMR8dervrEGSfoY0AfsiIgPZh3PeEiaCcyMiIOSpgDPAZ+rt+9FkoDJEdEnqQD8EbgvIvZX6jNyt0dwqQikJjNMa8x6ERFPpn0fAPaTdIGrSxHRFREvZh1HmW4BuiPipYg4B+wElmUcU1ki4vc0SJfAiHg1Ig6mj18n6YtyQ7ZRjV0k+tLFQvpX0e1W7goBgKSHJR0DvgA8NNL4OnEP8Jusg8ipG4Bjg5Z7qMMNTiOTdCMwD/hTtpGUR9IESYeA14CnIqKieTRkIZD0tKTnh/lbBhARayNiNvAoSRe1mjVSLumYtcB5knxq1mhyqVMa5rm63dNsNJKagT3A/SVHBOpGRFyIiJtJ9vpvkVTRw3YN0ZimVETcMcqhjwG/BtZVMZxxGSkXSXcDnwE+Xm6/52tlDN9LvekBZg9abgOOZxSLDZIeU98DPBoRv8w6nvGKiLOSOoElQMVO6DfkHsHVSJo7aHEp8EJWsYyXpCXAg8DSiOjPOp4cOwDMldQuaSJwF7A345hyLz3JuhXoiojvZx1PuSS1XroiUNIk4A4qvN3K41VDe4D3klyh8jKwKiJeyTaq8kjqBq4DetOn9tfxFVDLgR8BrcBZ4FBEfCrbqEZP0qeBHwITgG0R8XDGIZVF0i+ARSTTHZ8A1kXE1kyDKpOk24A/AEdI/r0DfCPtpV43JH0Y2E7y22oCdkXEhop+Rt4KgZmZDZW7Q0NmZjaUC4GZWc65EJiZ5ZwLgZlZzrkQmJnlnAuBmVnOuRCYjYGkWZIer+L6V0n6UrXWbzYc30dgZpZz3iOw3JM0P23uU5Q0OW3+MeykXpJuHEvTFklNkv4uqXXQcrekliuMXy/pgfIyMSuPC4HlXkQcIJkb6DvA94BHIqIiE3pFxEXgEZIpzyGZJ+ZwRJyqxPrNKsGFwCyxgaTDWAdJMaikbcCl4/73AD+r8PrNxsWFwCwxHWgGpgDFsb45bXZ0KG0eMkREHANOSLodWIAbCFmNcSEwS2wBvknS3GfTWN+cNju6OW0eMpyfkhwi2hURF8oP06zyXAgs99LLNc9HxGPAd4H56f/eK2kvyR6HDwtZzfHlo2bXgKQO4AcRsTDrWMxKNWSrSrNaIunrwFe5fOWQWU3xHoHZMCR9CPh5ydNvRMSCCq1/LbCy5Ond9drZzOqbC4GZWc75ZLGZWc65EJiZ5ZwLgZlZzrkQmJnl3P8Axi9NbsHC4rQAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "import torch\n", "import torch.nn as nn\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from utils.common_tools import set_seed\n", "set_seed(1) # 设置随机种子\n", "\n", "inputs = torch.linspace(-3, 3, steps=500)\n", "target = torch.zeros_like(inputs)\n", "\n", "loss_f = nn.SmoothL1Loss(reduction='none')\n", "\n", "loss_smooth = loss_f(inputs, target)\n", "\n", "loss_l1 = np.abs(inputs.numpy())\n", "\n", "plt.plot(inputs.numpy(), loss_smooth.numpy(), label='Smooth L1 Loss')\n", "plt.plot(inputs.numpy(), loss_l1, label='L1 loss')\n", "plt.xlabel('x_i - y_i')\n", "plt.ylabel('loss value')\n", "plt.legend()\n", "plt.grid()\n", "plt.show()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:泊松分布的负对数似然损失函数是怎样?\n", "- `torch.nn.PoissonNLLLoss(log_input: bool = True, full: bool = False, size_average=None, eps: float = 1e-08, reduce=None, reduction: str = 'mean')`\n", "- log_input:输入是否为对数形式,决定计算公式\n", "- full:计算所有loss,默认为False\n", "- eps:修正项,避免log(input)为nan\n", "- 当log_input = True, loss(input, target) = exp (input)-target * input\n", "- 当log_input = False, loss(input, target) = input - target * log(input +eps)\n", "\n", "Q:PoissonNLLLoss以及手算校验的代码示例" ] }, { "cell_type": "code", "execution_count": 42, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "input:tensor([[-1.0276, -0.5631],\n", " [-0.8923, -0.0583]])\n", "target:tensor([[-0.1955, -0.9656],\n", " [ 0.4224, 0.2673]])\n", "Poisson NLL loss:tensor([[0.1570, 0.0258],\n", " [0.7866, 0.9590]])\n", "第一个元素loss: tensor(0.1570)\n" ] } ], "source": [ "inputs = torch.randn((2, 2))\n", "target = torch.randn((2, 2))\n", "\n", "loss_f = nn.PoissonNLLLoss(log_input=True, full=False, reduction='none')\n", "loss = loss_f(inputs, target)\n", "print(\"input:{}\\ntarget:{}\\nPoisson NLL loss:{}\".format(inputs, target, loss))\n", "\n", "idx = 0\n", "loss_1 = 
torch.exp(inputs[idx, idx]) - target[idx, idx]*inputs[idx, idx]\n", "print(\"第一个元素loss:\", loss_1)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# 4.优化器(一)\n", "\n", "Q:Pytorch中的优化器的代码是怎样?\n", "- ```python\n", "class Optimizer(object):\n", " def __init__(self, params, defaults):\n", " self.defaults = defaults\n", " self.state = defaultdict(dict)\n", " self.param_groups = []\n", "\n", " self.param_groups = [{'params': param_groups}]\n", "```\n", "- defaults:优化器超参数\n", "- state:参数的缓存, 如momentum的缓存\n", "- params_groups:管理的参数组\n", "- _step_count:记录更新次数, 学习率调整中使用\n", "\n", "Q:pytorch的优化器有哪些方法?\n", "- zero_grad():清空所管理参数的梯度\n", "- step():执行一步更新\n", "- add_param_group():添加参数组\n", "- state_dict():获取优化器当前状态信息字典\n", "- load_state_dict():加载状态信息字典\n", "\n", "Q:优化器的step和zero_grad代码示例" ] }, { "cell_type": "code", "execution_count": 44, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "weight before step:tensor([[0.6614, 0.2669],\n", " [0.0617, 0.6213]])\n", "weight after step:tensor([[ 0.5614, 0.1669],\n", " [-0.0383, 0.5213]])\n", "weight in optimizer:140728466157072\n", "weight in weight:140728466157072\n", "\n", "weight.grad is tensor([[1., 1.],\n", " [1., 1.]])\n", "\n", "after optimizer.zero_grad(), weight.grad is\n", "tensor([[0., 0.],\n", " [0., 0.]])\n" ] } ], "source": [ "import torch\n", "import torch.optim as optim\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "weight = torch.randn((2, 2), requires_grad=True)\n", "weight.grad = torch.ones((2, 2))\n", "\n", "optimizer = optim.SGD([weight], lr=0.1)\n", 
"\n", "print(\"weight before step:{}\".format(weight.data))\n", "optimizer.step() # 修改lr=1 0.1观察结果\n", "print(\"weight after step:{}\".format(weight.data))\n", "\n", "print(\"weight in optimizer:{}\\nweight in weight:{}\\n\".format(id(optimizer.param_groups[0]['params'][0]), id(weight)))\n", "\n", "print(\"weight.grad is {}\\n\".format(weight.grad))\n", "optimizer.zero_grad()\n", "print(\"after optimizer.zero_grad(), weight.grad is\\n{}\".format(weight.grad))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Q:优化器的add_param_group代码示例" ] }, { "cell_type": "code", "execution_count": 46, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "optimizer.param_groups is\n", "[{'params': [tensor([[0.6614, 0.2669],\n", " [0.0617, 0.6213]], requires_grad=True)], 'lr': 0.1, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}]\n", "\n", "optimizer.param_groups is\n", "[{'params': [tensor([[0.6614, 0.2669],\n", " [0.0617, 0.6213]], requires_grad=True)], 'lr': 0.1, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}, {'params': [tensor([[-0.4519, -0.1661, -1.5228],\n", " [ 0.3817, -1.0276, -0.5631],\n", " [-0.8923, -0.0583, -0.1955]], requires_grad=True)], 'lr': 0.0001, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}]\n" ] } ], "source": [ "import torch\n", "import torch.optim as optim\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "weight = torch.randn((2, 2), requires_grad=True)\n", "weight.grad = torch.ones((2, 2))\n", "\n", "optimizer = optim.SGD([weight], lr=0.1)\n", "\n", "print(\"optimizer.param_groups is\\n{}\".format(optimizer.param_groups))\n", "\n", "w2 = torch.randn((3, 3), requires_grad=True)\n", "\n", "optimizer.add_param_group({\"params\": w2, 'lr': 0.0001})\n", "\n", "print(\"\\noptimizer.param_groups is\\n{}\".format(optimizer.param_groups))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ 
"Q:优化器的state_dict和load_state_dict代码示例" ] }, { "cell_type": "code", "execution_count": 48, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "state_dict before step:\n", " {'state': {}, 'param_groups': [{'lr': 0.1, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140728466498736]}]}\n", "state_dict after step:\n", " {'state': {140728466498736: {'momentum_buffer': tensor([[6.5132, 6.5132],\n", " [6.5132, 6.5132]])}}, 'param_groups': [{'lr': 0.1, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140728466498736]}]}\n", "\n", "state_dict before load state:\n", " {'state': {}, 'param_groups': [{'lr': 0.1, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140728466498736]}]}\n", "state_dict after load state:\n", " {'state': {140728466498736: {'momentum_buffer': tensor([[6.5132, 6.5132],\n", " [6.5132, 6.5132]])}}, 'param_groups': [{'lr': 0.1, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140728466498736]}]}\n" ] } ], "source": [ "import torch\n", "import torch.optim as optim\n", "from utils.common_tools import set_seed\n", "\n", "set_seed(1) # 设置随机种子\n", "\n", "weight = torch.randn((2, 2), requires_grad=True)\n", "weight.grad = torch.ones((2, 2))\n", "\n", "# ----------------------------------- state_dict -----------------------------------\n", "\n", "optimizer = optim.SGD([weight], lr=0.1, momentum=0.9)\n", "opt_state_dict = optimizer.state_dict()\n", "\n", "print(\"state_dict before step:\\n\", opt_state_dict)\n", "\n", "for i in range(10):\n", " optimizer.step()\n", "\n", "print(\"state_dict after step:\\n\", optimizer.state_dict())\n", "\n", "torch.save(optimizer.state_dict(), \"optimizer_state_dict.pkl\")\n", "\n", "# -----------------------------------load state_dict -----------------------------------\n", "\n", "optimizer = optim.SGD([weight], lr=0.1, momentum=0.9)\n", "state_dict = 
torch.load(\"optimizer_state_dict.pkl\")\n", "\n", "print(\"\\nstate_dict before load state:\\n\", optimizer.state_dict())\n", "optimizer.load_state_dict(state_dict)\n", "print(\"state_dict after load state:\\n\", optimizer.state_dict())" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# 5.优化器(二)\n", "\n", "Q:optim.SGD是怎样?\n", "- `torch.optim.SGD(params, lr=, momentum=0, dampening=0, weight_decay=0, nesterov=False)`\n", "- params:管理的参数组\n", "- lr:初始学习率\n", "- momentum:动量系数, 贝塔\n", "- weight_decay: L2 正则化系数\n", "- nesterov:是否采用 NAG" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.6" } }, "nbformat": 4, "nbformat_minor": 4 }