From bcda607497441e4ef468c4e31a749910249a2a6d Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Tue, 13 Oct 2020 01:06:35 -0300 Subject: [PATCH 01/18] Update requirements --- .gitignore | 2 ++ requirements.txt | 14 +++++++------- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index 763513e..c16573f 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,3 @@ .ipynb_checkpoints +.idea +env/ diff --git a/requirements.txt b/requirements.txt index 8c91a19..58ba14d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ -ipython==7.8.0 -jupyter==1.0.0 -matplotlib==3.1.1 -numpy==1.17.2 -scikit-learn==0.21.3 -torch==1.2.0 -torchvision==0.4.0 +ipython +jupyter +matplotlib +numpy +scikit-learn +torch +torchvision From 47fbe98476ad15e731c5378ea61559adac7088b5 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Tue, 13 Oct 2020 01:19:07 -0300 Subject: [PATCH 02/18] Add README --- README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..87ae473 --- /dev/null +++ b/README.md @@ -0,0 +1,24 @@ +# Installation + +First, clone this repository using `git`: + +```sh +git clone https://github.com/mtreviso/pytorch-lecture.git +cd pytorch-lecture +``` + +It is highly recommended that you work inside a Python virtualenv. +Note that in this lecture we will be using Python 3.6+. +You can create a virtualenv and install all dependencies via: +```sh +python3 -m venv env +source env/bin/activate +pip3 install -r requirements.txt +``` + +Run Jupyter: +```sh +jupyter notebook +``` + +After running the command above, your browser will automatically open the Jupyter homepage: `http://localhost:8888/tree`. From 33b58907a387723db7a8ec09980ad13eeba8e7ae Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Tue, 13 Oct 2020 07:06:55 -0300 Subject: [PATCH 03/18] Add my changes --- 00-intro.ipynb | 17 +- 01-pytorch-basics.ipynb | 1451 +++++++++++++++++++++++++++++++----- 02-linear-regression.ipynb | 425 ++++++++++- 03-modules-and-mlps.ipynb | 2 +- 4 files changed, 1667 insertions(+), 228 deletions(-) diff --git a/00-intro.ipynb b/00-intro.ipynb index 94d62ed..7262673 100644 --- a/00-intro.ipynb +++ b/00-intro.ipynb @@ -6,16 +6,16 @@ "source": [ "# Introduction\n", "\n", - "Material for this tutorial is here: https://github.com/goncalomcorreia/pytorch-lecture\n", + "Material for this lecture is here: https://github.com/mtreviso/pytorch-lecture\n", "\n", "**Note:**\n", - "If you use PyTorch on a daily basis, you will most probably not learn a lot during this tutorial.\n", + "If you use PyTorch on a daily basis, you will most probably not learn a lot during this lecture.\n", "\n", "**Goals:**\n", "- understand PyTorch concepts (e.g. autograd, broadcasting, ...) 
and understand what it can and cannot do\n", "- be aware of some handy tools/libraries\n", "- be able to create simple neural networks\n", - "- learn the some tools that will help to code more complicated models in the future" + "- learn some tools that will help to code more complex models in the future" ] }, { @@ -25,11 +25,9 @@ "# PyTorch Overview\n", "\n", "\n", - "> \"PyTorch - Tensors and Dynamic neural networks in Python\n", - "with strong GPU acceleration.\n", - "PyTorch is a deep learning framework for fast, flexible experimentation.\"\n", + "> \"PyTorch - An open source machine learning framework that accelerates the path from research prototyping to production deployment.\"\n", ">\n", - "> -- https://pytorch.org/*\n", + "> -- https://pytorch.org/\n", "\n", "This was the tagline prior to PyTorch 1.0.\n", "Now it's:\n", @@ -137,7 +135,8 @@ "## Tutorials I based this on...\n", "\n", "- https://github.com/sotte/pytorch_tutorial\n", - "- https://github.com/erickrf/pytorch-lecture" + "- https://github.com/erickrf/pytorch-lecture\n", + "- https://github.com/goncalomcorreia/pytorch-lecture" ] }, { @@ -168,7 +167,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/01-pytorch-basics.ipynb b/01-pytorch-basics.ipynb index 4976bd5..d5f6815 100644 --- a/01-pytorch-basics.ipynb +++ b/01-pytorch-basics.ipynb @@ -6,7 +6,7 @@ "source": [ "# An introduction to Pytorch\n", "\n", - "Pytorch is a platform for deep learning in Python. \n", + "Pytorch is a platform for deep learning in Python/C++. In this lecture we will focus in the Python landscape. \n", "\n", "It provides tools for efficiently creating, training, testing and analyzing neural networks:\n", "\n", @@ -28,19 +28,45 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 45, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "import numpy as np\n", - "import torch" + "import torch\n", + "\n", + "np.random.seed(0)\n", + "torch.manual_seed(0)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "v1: [0 1 2 3 4 5 6 7 8 9]\n", + "\n", + "v2: [10 11 12 13 14 15 16 17 18 19]\n", + "\n", + "Dot product: 735\n" + ] + } + ], "source": [ "v1 = np.arange(10)\n", "v2 = np.arange(10, 20)\n", @@ -52,9 +78,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "v1: tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])\n", + "\n", + "v2: tensor([10, 11, 12, 13, 14, 15, 16, 17, 18, 19])\n", + "\n", + "Dot product: 735\n" + ] + } + ], "source": [ "v1 = torch.arange(10)\n", "v2 = torch.arange(10, 20)\n", @@ -73,9 +111,23 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 8, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "v3: [2 4 6 8]\n", + "\n", + "v4: [0.72639872 0.73935424 0.68015035 0.47352003 0.45043365 0.71525855\n", + " 0.18119405 0.1819192 0.9227315 0.67426907]\n", + "\n" + ] + } + ], "source": [ "v3 = np.array([2, 4, 6, 8])\n", "v4 = 
np.random.random(10)\n", @@ -86,9 +138,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "v3: tensor([2, 4, 6, 8])\n", + "\n", + "v4: tensor([0.8405, 0.6914, 0.0403, 0.7377, 0.3544, 0.5140, 0.4915, 0.1579, 0.4751,\n", + " 0.7653])\n", + "\n" + ] + } + ], "source": [ "v3 = torch.tensor([2, 4, 6, 8])\n", "v4 = torch.rand(10)\n", @@ -101,14 +165,23 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "#### You can also change a value inside the tensor manually" + "#### You can also change a value inside the array manually" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([0.8405, 0.1000, 0.0403, 0.7377, 0.3544, 0.5140, 0.4915, 0.1579, 0.4751,\n", + " 0.7653])\n" + ] + } + ], "source": [ "v4[1] = 0.1\n", "print(v4)" @@ -130,7 +203,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ @@ -139,9 +212,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor(0)\n", + "torch.Size([])\n" + ] + } + ], "source": [ "print(v1[0])\n", "print(v1[0].shape)" @@ -156,9 +238,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0\n", + "True\n" + ] + } + ], "source": [ "number = v1[0].item()\n", "print(number)\n", @@ -174,9 +265,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1., 0., 0.],\n", + " [0., 1., 0.],\n", + " [0., 0., 1.]])" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "A = torch.eye(3)\n", "A" @@ -184,9 +288,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "array([[1., 0., 0.],\n", + " [0., 1., 0.],\n", + " [0., 0., 1.]], dtype=float32)" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# torch --> numpy\n", "B = A.numpy()\n", @@ -195,9 +312,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1., 0., 0.],\n", + " [0., 1., 0.],\n", + " [0., 0., 1.]], dtype=torch.float64)" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# numpy --> torch\n", "torch.from_numpy(np.eye(3))" @@ -212,36 +342,80 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v1" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([10, 11, 12, 13, 14, 15, 16, 17, 
18, 19])" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v2" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([10, 12, 14, 16, 18, 20, 22, 24, 26, 28])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v1 + v2" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 0, 11, 24, 39, 56, 75, 96, 119, 144, 171])" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v1 * v2" ] @@ -255,21 +429,45 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "Integer division of tensors using div or / is no longer supported, and in a future release div will perform true division as in Python 3. Use true_divide or floor_divide (// in Python) instead.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mv1\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0mv2\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m: Integer division of tensors using div or / is no longer supported, and in a future release div will perform true division as in Python 3. Use true_divide or floor_divide (// in Python) instead." 
+ ] + } + ], "source": [ "v1 / v2 " ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0.0000, 0.0909, 0.1667, 0.2308, 0.2857, 0.3333, 0.3750, 0.4118, 0.4444,\n", + " 0.4737])" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "x = v1.to(torch.float)\n", - "y = v2.to(torch.float)\n", + "x = v1.float()\n", + "y = v2.float()\n", "x / y" ] }, @@ -282,27 +480,60 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0., 1., 2., 3., 4., 5., 6., 7., 8., 9.])" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "x" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "x + 1" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 0., 1., 4., 9., 16., 25., 36., 49., 64., 81.])" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "x ** 2" ] @@ -316,9 +547,38 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "m1: tensor([[0.7911, 0.7790, 0.3392, 0.1068],\n", + " [0.8816, 0.0246, 0.8745, 0.9065],\n", + " [0.5135, 0.7818, 0.1493, 0.4424],\n", + " [0.0682, 0.0821, 0.3905, 0.4508],\n", + " [0.6486, 0.0946, 0.3196, 0.8485]])\n", + "\n", + "m2: tensor([[0.2707, 0.4952, 0.7372, 0.4750, 0.1376],\n", + " [0.8742, 0.8034, 0.6029, 0.8402, 0.8754],\n", + " [0.3132, 0.6785, 0.7391, 0.8908, 0.3854],\n", + " [0.1835, 0.9370, 0.7891, 0.0467, 0.4983]])\n", + "\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "1D tensors expected, got 2D, 2D tensors at ../aten/src/TH/generic/THTensorEvenMoreMath.cpp:83", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"m1: %s\\n\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mm1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"m2: %s\\n\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mm2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm1\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m: 1D tensors expected, got 2D, 2D tensors at ../aten/src/TH/generic/THTensorEvenMoreMath.cpp:83" + ] + } + ], "source": [ "m1 = 
torch.rand(5, 4)\n", "m2 = torch.rand(4, 5)\n", @@ -337,20 +597,44 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": { "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[1.0210, 1.3478, 1.3878, 1.3374, 0.9747],\n", + " [0.7003, 1.8990, 2.0262, 1.2607, 0.9315],\n", + " [0.9505, 1.3983, 1.3094, 1.0544, 1.0331],\n", + " [0.2952, 0.7871, 0.7441, 0.4703, 0.4564],\n", + " [0.5140, 1.4091, 1.4409, 0.7118, 0.7180]])\n" + ] + } + ], "source": [ "print(m1.mm(m2))" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[1.0210, 1.3478, 1.3878, 1.3374, 0.9747],\n", + " [0.7003, 1.8990, 2.0262, 1.2607, 0.9315],\n", + " [0.9505, 1.3983, 1.3094, 1.0544, 1.0331],\n", + " [0.2952, 0.7871, 0.7441, 0.4703, 0.4564],\n", + " [0.5140, 1.4091, 1.4409, 0.7118, 0.7180]])\n" + ] + } + ], "source": [ "print(m1 @ m2)" ] @@ -364,9 +648,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[[1.3949, 1.0731, 1.3313, 0.8972, 1.3460],\n", + " [1.1715, 0.9119, 1.2318, 0.5970, 1.1741],\n", + " [0.6570, 0.4586, 0.6742, 0.7064, 0.8182],\n", + " [0.7295, 0.5646, 0.8505, 1.0040, 1.1814],\n", + " [1.0063, 0.5420, 0.8679, 0.7967, 0.8066]],\n", + "\n", + " [[0.5311, 0.6051, 0.5187, 0.2731, 0.2563],\n", + " [0.8225, 0.7501, 0.7612, 0.4548, 0.3712],\n", + " [0.9531, 0.5223, 0.5775, 0.3952, 0.3753],\n", + " [1.1869, 0.7789, 0.7322, 0.5213, 0.5131],\n", + " [0.5715, 0.3806, 0.2838, 0.2202, 0.2585]]])\n" + ] + } + ], "source": [ "m1 = torch.rand(2, 5, 4)\n", "m2 = torch.rand(2, 4, 5)\n", @@ -383,13 +685,203 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 31, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[[1.3949, 1.0731, 1.3313, 0.8972, 1.3460],\n", + " [1.1715, 0.9119, 1.2318, 0.5970, 1.1741],\n", + " [0.6570, 0.4586, 0.6742, 0.7064, 0.8182],\n", + " [0.7295, 0.5646, 0.8505, 1.0040, 1.1814],\n", + " [1.0063, 0.5420, 0.8679, 0.7967, 0.8066]],\n", + "\n", + " [[0.5311, 0.6051, 0.5187, 0.2731, 0.2563],\n", + " [0.8225, 0.7501, 0.7612, 0.4548, 0.3712],\n", + " [0.9531, 0.5223, 0.5775, 0.3952, 0.3753],\n", + " [1.1869, 0.7789, 0.7322, 0.5213, 0.5131],\n", + " [0.5715, 0.3806, 0.2838, 0.2202, 0.2585]]])\n" + ] + } + ], "source": [ "print(m1 @ m2)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "What if I have even more dimensions?" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "Expected 3-dimensional tensor, but got 4-dimensional tensor for argument #1 'batch1' (while checking arguments for bmm)", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mm2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrand\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm1\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbmm\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m: Expected 3-dimensional tensor, but got 4-dimensional tensor for argument #1 'batch1' (while checking arguments for bmm)" + ] + } + ], + "source": [ + "m1 = torch.rand(2, 3, 5, 4)\n", + "m2 = torch.rand(2, 3, 4, 5)\n", + "\n", + "print(m1.bmm(m2))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`.bmm` works with 3d tensors. We can use the more general `matmul` instead. In fact, the `@` operator is a shorthand for `matmul`." + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([2, 3, 5, 5])\n", + "tensor([[[[1.5123, 1.6268, 1.8535, 2.1311, 1.3917],\n", + " [1.0503, 0.6726, 1.0155, 1.1335, 0.8117],\n", + " [0.6293, 0.2779, 0.4590, 0.5317, 0.4186],\n", + " [1.0184, 1.3185, 1.3476, 1.5560, 1.0910],\n", + " [0.5591, 0.5152, 0.7758, 0.8843, 0.3094]],\n", + "\n", + " [[1.2068, 1.1475, 1.1560, 1.0750, 1.2943],\n", + " [0.3632, 0.5753, 0.4025, 0.2080, 0.4787],\n", + " [0.9171, 0.6348, 0.6151, 0.6738, 0.9318],\n", + " [0.6193, 1.8378, 1.1507, 0.9540, 1.1438],\n", + " [0.7270, 1.4191, 1.0204, 1.2311, 1.0825]],\n", + "\n", + " [[1.4094, 1.1296, 0.6805, 1.5907, 1.6366],\n", + " [0.4048, 0.2806, 0.2585, 0.4226, 0.2137],\n", + " [1.1688, 0.9231, 0.6348, 1.2367, 0.9975],\n", + " [0.8251, 0.5466, 0.5144, 1.2760, 0.8789],\n", + " [0.8575, 0.6042, 0.4822, 1.4390, 1.2007]]],\n", + "\n", + "\n", + " [[[0.7810, 0.5242, 0.5863, 1.0640, 0.7554],\n", + " [0.7935, 0.1950, 0.5598, 0.4831, 0.3593],\n", + " [1.6333, 0.8133, 1.2474, 1.7215, 1.1238],\n", + " [0.8468, 0.6065, 0.2264, 1.4667, 0.7630],\n", + " [1.3928, 0.6515, 0.8458, 1.4605, 1.1252]],\n", + "\n", + " [[1.1226, 1.4591, 1.3862, 0.8904, 0.9946],\n", + " [1.1494, 1.1454, 1.4351, 0.6208, 0.6796],\n", + " [1.1410, 1.4001, 1.5988, 1.2281, 0.9615],\n", + " [0.9949, 1.1179, 1.3078, 0.3292, 0.6981],\n", + " [1.1931, 1.3291, 1.6020, 0.9667, 0.8592]],\n", + "\n", + " [[1.2809, 0.4417, 1.1952, 0.6683, 0.9444],\n", + " [1.3687, 0.5593, 1.0666, 0.8061, 1.0604],\n", + " [1.8253, 0.7350, 1.5180, 1.0972, 1.5916],\n", + " [1.4248, 0.5172, 0.7330, 0.7102, 1.0248],\n", + " [1.2086, 0.2634, 0.8638, 0.4023, 0.7880]]]])\n" + 
] + } + ], + "source": [ + "print(m1.matmul(m2).shape)\n", + "print(m1.matmul(m2))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Anoter option is to use the powerful `einsum` function. Let's say our input have the following representation:\n", + "- `b` = batch size \n", + "- `c` = channels\n", + "- `i` = `m1` timesteps\n", + "- `j` = `m2` timesteps\n", + "- `d` = hidden size" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[[1.5123, 1.6268, 1.8535, 2.1311, 1.3917],\n", + " [1.0503, 0.6726, 1.0155, 1.1335, 0.8117],\n", + " [0.6293, 0.2779, 0.4590, 0.5317, 0.4186],\n", + " [1.0184, 1.3185, 1.3476, 1.5560, 1.0910],\n", + " [0.5591, 0.5152, 0.7758, 0.8843, 0.3094]],\n", + "\n", + " [[1.2068, 1.1475, 1.1560, 1.0750, 1.2943],\n", + " [0.3632, 0.5753, 0.4025, 0.2080, 0.4787],\n", + " [0.9171, 0.6348, 0.6151, 0.6738, 0.9318],\n", + " [0.6193, 1.8378, 1.1507, 0.9540, 1.1438],\n", + " [0.7270, 1.4191, 1.0204, 1.2311, 1.0825]],\n", + "\n", + " [[1.4094, 1.1296, 0.6805, 1.5907, 1.6366],\n", + " [0.4048, 0.2806, 0.2585, 0.4226, 0.2137],\n", + " [1.1688, 0.9231, 0.6348, 1.2367, 0.9975],\n", + " [0.8251, 0.5466, 0.5144, 1.2760, 0.8789],\n", + " [0.8575, 0.6042, 0.4822, 1.4390, 1.2007]]],\n", + "\n", + "\n", + " [[[0.7810, 0.5242, 0.5863, 1.0640, 0.7554],\n", + " [0.7935, 0.1950, 0.5598, 0.4831, 0.3593],\n", + " [1.6333, 0.8133, 1.2474, 1.7215, 1.1238],\n", + " [0.8468, 0.6065, 0.2264, 1.4667, 0.7630],\n", + " [1.3928, 0.6515, 0.8458, 1.4605, 1.1252]],\n", + "\n", + " [[1.1226, 1.4591, 1.3862, 0.8904, 0.9946],\n", + " [1.1494, 1.1454, 1.4351, 0.6208, 0.6796],\n", + " [1.1410, 1.4001, 1.5988, 1.2281, 0.9615],\n", + " [0.9949, 1.1179, 1.3078, 0.3292, 0.6981],\n", + " [1.1931, 1.3291, 1.6020, 0.9667, 0.8592]],\n", + "\n", + " [[1.2809, 0.4417, 1.1952, 0.6683, 0.9444],\n", + " [1.3687, 0.5593, 1.0666, 0.8061, 1.0604],\n", + " [1.8253, 0.7350, 1.5180, 1.0972, 1.5916],\n", + " [1.4248, 0.5172, 0.7330, 0.7102, 1.0248],\n", + " [1.2086, 0.2634, 0.8638, 0.4023, 0.7880]]]])" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.einsum('bcid,bcdj->bcij', m1, m2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "See more about `einsum` here: https://pytorch.org/docs/master/generated/torch.einsum.html#torch.einsum" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -403,19 +895,34 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 46, "metadata": {}, "outputs": [], "source": [ "m = torch.rand(5, 4)\n", - "v = torch.rand(4)" + "v = torch.arange(4)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 47, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "m: tensor([[0.4963, 0.7682, 0.0885, 0.1320],\n", + " [0.3074, 0.6341, 0.4901, 0.8964],\n", + " [0.4556, 0.6323, 0.3489, 0.4017],\n", + " [0.0223, 0.1689, 0.2939, 0.5185],\n", + " [0.6977, 0.8000, 0.1610, 0.2823]])\n", + "\n", + "v: tensor([0, 1, 2, 3])\n", + "\n" + ] + } + ], "source": [ "print(\"m:\", m)\n", "print()\n", @@ -425,26 +932,53 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 49, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "m + v:\n", + " tensor([[0.4963, 1.7682, 2.0885, 3.1320],\n", + " [0.3074, 1.6341, 2.4901, 
3.8964],\n", + " [0.4556, 1.6323, 2.3489, 3.4017],\n", + " [0.0223, 1.1689, 2.2939, 3.5185],\n", + " [0.6977, 1.8000, 2.1610, 3.2823]])\n" + ] + } + ], "source": [ "m_plus_v = m + v\n", - "print(\"m + v:\", m_plus_v)" + "print(\"m + v:\\n\", m_plus_v)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Let's see row by row" + "Proof check" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 50, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "m[0] = tensor([0.4963, 0.7682, 0.0885, 0.1320])\n", + "\n", + "v = tensor([0, 1, 2, 3])\n", + "\n", + "m[0] + v = tensor([0.4963, 1.7682, 2.0885, 3.1320])\n", + "\n", + "(m + v)[0] = tensor([0.4963, 1.7682, 2.0885, 3.1320])\n" + ] + } + ], "source": [ "print(\"m[0] = %s\\n\" % m[0])\n", "print(\"v = %s\\n\" % v)\n", @@ -463,27 +997,61 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 51, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([4])" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v.shape" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 52, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 3])" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 53, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 1],\n", + " [2, 3]])" + ] + }, + "execution_count": 53, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v = v.view(2, 2)\n", "v" @@ -491,9 +1059,23 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 54, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0],\n", + " [1],\n", + " [2],\n", + " [3]])" + ] + }, + "execution_count": 54, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "v = v.view(4, 1)\n", "v" @@ -508,9 +1090,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 55, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "The size of tensor a (5) must match the size of tensor b (4) at non-singleton dimension 0", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mm\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (5) must match the size of tensor b (4) at non-singleton dimension 0" + ] + } + ], "source": [ "m + v" ] @@ -524,9 +1118,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 56, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.4963, 1.7682, 2.0885, 3.1320],\n", + " [0.3074, 1.6341, 2.4901, 3.8964],\n", + " [0.4556, 1.6323, 2.3489, 3.4017],\n", + " [0.0223, 1.1689, 2.2939, 3.5185],\n", + " [0.6977, 1.8000, 2.1610, 3.2823]])" + ] + }, + "execution_count": 56, + "metadata": {}, + "output_type": 
"execute_result" + } + ], "source": [ "v = v.view(1, 4)\n", "m + v" @@ -536,42 +1145,107 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Broadcasting can be tricky sometimes:" + "### General Broadcast Semantics\n", + "\n", + "See more here: https://pytorch.org/docs/master/notes/broadcasting.html" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Two tensors are “broadcastable” if the following rules hold:\n", + "\n", + "- Each tensor has at least one dimension.\n", + "\n", + "- When iterating over the dimension sizes, starting at the trailing dimension, the dimension sizes must either be equal, one of them is 1, or one of them does not exist." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 62, "metadata": {}, "outputs": [], "source": [ - "u = torch.rand(4, 1)\n", - "u + v" + "x = torch.rand(5,7,3)\n", + "y = torch.rand(5,7,3)\n", + "z = x + y\n", + "# same shapes are always broadcastable (i.e. the above rules always hold)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 66, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "The size of tensor a (0) must match the size of tensor b (2) at non-singleton dimension 1", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrand\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrand\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mz\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0;31m# x and y are not broadcastable, because x does not have at least 1 dimension\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (0) must match the size of tensor b (2) at non-singleton dimension 1" + ] + } + ], "source": [ - "u" + "x = torch.rand((0,))\n", + "y = torch.rand(2,2)\n", + "z = x + y\n", + "# x and y are not broadcastable, because x does not have at least 1 dimension" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 68, "metadata": {}, "outputs": [], "source": [ - "v" + "# can line up trailing dimensions\n", + "x = torch.empty(5,3,4,1)\n", + "y = torch.empty( 3,1,1)\n", + "z = x + y\n", + "# x and y are broadcastable.\n", + "# 1st trailing dimension: both have size 1\n", + "# 2nd trailing dimension: y has size 1\n", + "# 3rd trailing dimension: x size == y size\n", + "# 4th trailing dimension: y dimension doesn't exist" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": {}, + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "The size of tensor a (2) must match the size of 
tensor b (3) at non-singleton dimension 1", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mempty\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mempty\u001b[0m\u001b[0;34m(\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mz\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;31m# x and y are not broadcastable, because in the 3rd trailing dimension 2 != 3\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (2) must match the size of tensor b (3) at non-singleton dimension 1" + ] + } + ], + "source": [ + "# but:\n", + "x = torch.empty(5,2,4,1)\n", + "y = torch.empty( 3,1,1)\n", + "z = x + y\n", + "# x and y are not broadcastable, because in the 3rd trailing dimension 2 != 3" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Always take care with tensor shapes! It is a good practice to verify in the interpreter how some expression is evaluated before inserting into your model code." + "Always take care with tensor shapes! It is a good practice to verify in the interpreter how some expression is evaluated before inserting into your model code. 
\n", + "\n", + "In other words, **you can use pytorch's dynamic graph creation ability to debug your model by printing tensor shapes!**" ] }, { @@ -585,14 +1259,13 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 71, "metadata": {}, "outputs": [], "source": [ "%matplotlib inline\n", "\n", "import matplotlib\n", - "import numpy as np\n", "import matplotlib.pyplot as pl" ] }, @@ -605,7 +1278,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 72, "metadata": {}, "outputs": [], "source": [ @@ -614,9 +1287,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 73, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([200])" + ] + }, + "execution_count": 73, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "x.shape" ] @@ -630,9 +1314,32 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 74, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 74, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO29eXSkV3Xo+9tVmlWluUpjj5K6Wz233e42NhgbD7FNwEAG7JuAc0Oen19gLchL7orzeI+wknfvJcnKsEgILCfhQSYIJCbuCwZjDNhgPHW3e1S3xh6k1lSaVZpLdd4f9X1yoVZ1S6r6hqo6v7Vqqeobt3bVd/Y5Z++ztyil0Gg0Gk324nFaAI1Go9E4izYEGo1Gk+VoQ6DRaDRZjjYEGo1Gk+VoQ6DRaDRZTo7TAmyEqqoqtXXrVqfF0Gg0mrTixIkTw0qpwMrtaWkItm7dyvHjx50WQ6PRaNIKEbmy2nY9NaTRaDRZjjYEGo1Gk+VoQ6DRaDRZjjYEGo1Gk+VoQ6DRaDRZTkoMgYh8WUSGRORcgv0iIp8XkU4ROSMit8Tte1BE2ox9T6VCHo1Go9GsnVSNCL4CPHiD/Q8BzcbrCeCLACLiBb5g7N8NPCYiu1Mkk0aj0WjWQErWESilXhaRrTc45BHgH1Us5/VrIlImIrXAVqBTKdUNICJfN45tTYVcmtQyNDnHq90jTM4u8vC+Wip9+U6LlFUMh+d5rXuE0NQ897VUs6miyGmRsorp+Qg/uDBIeD7CwU1l7KkrdVqklGHXgrJ6oCfuc6+xbbXtR1e7gIg8QWw0webNm62RUpOQb5/p4//8xmkWIlEA/ujbrTz1UAsfe+c2hyXLDl5uD/HJr7/F2MwiAP/juQt87J3b+f0HdyIiDkuX+bxxaZTf/eYpekZnARCBj9y+hU+/t4X8HK/D0iWPXYZgtV+qusH26zcq9TTwNMDhw4d1NR0b+ebxHv7bv5/h8JZyPvv+PeR6PfzZ82388bdbyfUKH33HVqdFzGhebg/xG//fGzQH/fz947dRUZzHX/+wgy+91EVhrpdP3tfstIgZzcWBST765depLingnz92lC2VRfzDTy/xlZ9dZnFJ8T8/tM9pEZPGLkPQC2yK+9wA9AF5CbZrXELP6Ax/eOw8dzRW8uXfuI2C3Fjv54u/fgv/xz+f4A+PnefQpnL2NWTOMNlNjE4v8LvfPE1T0Mczv30HxfmxR/bPf+UAAH/5g3Zaav08sKfGSTEzlqm5RX77n09SUpDLN598B0F/AQCfff8eCvO8fPHHXRxoKOXRI+k9S2FX+Ogx4KNG9NDtwIRSqh94E2gWkW0ikgc8ahyrcQFKKZ565gweEf7sVw4sGwGAXK+Hv/jwQSqL8/jMsXNEo3qQZgV/eOw8EzOL/NWHDy0bAQAR4XMf2s/Oaj//73cuMB9ZclDKzOVvftjJ5ZFp/vqxQ8tGwOT3HtjJHY2V/PfnLjA+s+CQhKkhVeGjXwNeBXaKSK+IfExEnhSRJ41DngO6gU7g74DfBlBKRYBPAM8DF4BvKKXOp0ImTfL8uD3EK50j/P6DO6kvK7xuf0lBLk891MJbV8d55q1rDkiY2bQNTPG/TvfxxF3b2V1Xct3+vBwPn35vC1dHZ/jHn62aS0yTBEOTc3z11ct84FA9R7dXXrff6xE+877dhOcjfPGlLvsFTCGpihp67Cb7FfDxBPueI2YoNC7jiz/uora0gA/flnjY+6FD9XzlZ5f44o87+dChejwe7bhMFX/9ww6K87z81rsSO+Tv2hHg3TsC/M2POvn127dQmJf+jku38IUfdRJZUnzy3sQ+mF01JXzwYD1feeUyv3nnNqpLChIe62b0ymLNqpy4MsYbl0b5rXdtJy8n8c/E4xF+653b6QpN83JHyEYJM5vuUJjvnO3n8Tu2UlaUd8Njf/vuRiZmFzl2Wo/KUsX4zAJfe7OHX761gS2VxTc89pP3NbOwFOVfX79qk3SpRxsCzap8+ZVLlBbm8uhtm2567MP7agn68/nyK5etFyxL+NobV/GK8F/vvHl47pFtFeys9vOPr14hNvjWJMszJ6+xEImuKSJuS2UxdzUH+Lc3e4gsRa0XzgK0IdBcx8TsIi+0DvLBQ/U/56BMRF6Oh1+/fQsvt4e4MjJtg4SZzeJSlG+91cd7dgUJ+G++aE9E+Mg7tnC+b5KTV8dtkDCzUUrxb2/2cKChdFXfzGr8l6ObGZic40dt6Tkq1oZAcx3Pne1nIRLlQ7fUr/mcX7q1AYD/dVpH/ybLS20hhsPz/Mrhm4/GTD54qJ6iPC//cbLXQsmyg7d6xmkbnFpXSOh7dgUJ+
vP52hvpOT2kDYHmOp452UtT0Me++rWvDagvK+S2reUc04Ygaf79RC9Vvjzu3nldadmEFOfncG9LNd87N5C20xNu4dm3rlGQ6+F9B+rWfE6u18MHD9XzcnuICWP1dzqhDYHm5+gdm+HNy2N88FD9ulMXvP9AHe2DYS4OTFokXeYzsxDhR21D/OL+OnK963s837uvltHpBV7rHrVIuswnGlU8f36Qd+8I4FvDtGg8D+2rJRJV/ODCoEXSWYc2BJqf44XW2I/4vftq133uw/tq8XqEY6f0qGCjvNw+zHwkygN7qtd97t07AxTnefnOWa3/jXK6d5yByTke3Lv+ldoHGkqpKy3gu+cGLJDMWrQh0PwcL14YoinoY2vVjUPmVqPSl8/RbRW8eGHIAsmyg++3DlBamMuRrRXrPrcg17s8PbSkV3pviO+dHyDHI7xn1/oNsYjwC3treLkjRHg+YoF01qENgWaZyblFXr80wr0twQ1f456dQdoGp7g2PptCybKDyFKUFy8McW9LkJx1TguZ3L+7mrGZRU716Oih9aKU4vlzA9zRVEVpYe6GrvHQ3loWIlF+3JZenSFtCDTLvNweYnFJcX/L+ntDJvfsijk40+1BcANvXh5jYnaRB3ZvXP/vaq7CI/BSe3qGMTrJ5ZEZLo/McH8SHaFbNpdRUpDDy2mmf20INMv88MIQFcV5HNpcvuFrNAZ8bKoo5EcX0+tBcAMvd4TI8QjvbF57tNBKyoryOLipjJe0IV43PzFWxt+1Y+P6z/F6eGdzFT/pGE6rxX3aEGiA2LD4J53DvLOpCm8S+YJEhHt2Bnmlc1hnxFwnr3QOc8vm8nVHq6zk7p1BzlybYCQ8nyLJsoOX24fZVFF405QSN+NdzQH6J+boHAqnSDLr0YZAA0BXKExoap47m67Psrhe3r0jwOziEieujKVAsuxgfGaBs9cmuLOpKulrvXtHAKXgJx3DKZAsO1hcivJq1zDvSmI0ZmKOKF5OI/1rQ6AB4JXOEQDuaEy+ITqyrQKPoOPZ18GrXSMoBe9sTt4Q76svpbwol1c606chcpq3ro4zvbDEXc3J//7rywppDBSnlZ9AGwINAD/rig2LU1EQ3V+Qy776Ul7rGkmBZNnBTzuH8eXnsL+hLOlreTzCbVsreP2SNsRr5aedw3gE3rE9eUMA8M6mKt64NMpimqzy1oZAw1JU8WrXCHek6CEAuH17Jad6xpld0H6CtfCzrhGObqtY92riRBzdXsnV0Rn6dBjvmnjj0gi760ooLdpY2OhKjm6vZHZxiXPXJlJyPatJVYWyB0WkTUQ6ReSpVfb/NxE5ZbzOiciSiFQY+y6LyFlj3/FUyKNZH619k0zORbgjBf4Bk9sbK1lYinLyqvYT3IyhqTkuDU9zdPv6F5El4ui22LVev6RHZTdjPrLEW1fHuW0Di/gSYV7rzcvpMSpL2hCIiBf4AvAQsBt4TER2xx+jlPozpdRBpdRB4A+Al5RS8Rq6x9h/OFl5NOvH/LEe2ZbaB8HrEV7r1g3RzThxOWYsU9kQtdSW4C/I4XXtp7kp565NMB+JLhvPVBDw57O9qpg30mR6LhUjgiNAp1KqWym1AHwdeOQGxz8GfC0F99WkiBNXxqgvK6S29Pq6xBvFl5/D3vpSPU+9Bt68PEZBroc9dWvP9nozvB7hiPYTrIk3LsUM8eEUGmKIdazeuDRKNA3SfaTCENQDPXGfe41t1yEiRcCDwH/EbVbA90XkhIg8kegmIvKEiBwXkeOhUPp4492OUorjV0a5dcvGF5El4pbNZZzpHU8bh5lTvHl5lIObym5YEnQjHN1ewaXhaYam5lJ63UzjzcujbA8UU+W7eRGg9XBkWwWTcxHaBqdSel0rSMUvb7XVR4lM4PuAV1ZMC92plLqF2NTSx0XkrtVOVEo9rZQ6rJQ6HAgkH+uridE7Nsvg5DyHt6beENy6pZy5xSgX+nVa6kSE5yOc75tI6bSQyS3GCvFTumpZQqJRxfHLoymdFjJJJz9BKgxBLxBfSqkBSJQH91FWTAsppfqMv0PAt4hNNWlswlz0Zc2IIHbNk3phWUJOXR0nqlI/LQGwt76UXK/wlk5Al5CuUJjJuQi3bkm9/hvKCwn489PCEKfCELwJNIvINhHJI9bYH1t5kIiUAu8Gno3bViwifvM98ABwLgUyadbI8Suj+PJz2FWzttqs66GurJCakgJdR/cGnLw6hggc2pz8+oGVFOR62V1bog3xDTCN5MFNqde/iHBoU1laGOKkDYFSKgJ8AngeuAB8Qyl1XkSeFJEn4w79IPB9pVR8dfNq4Kcichp4A/iOUup7ycqkWTsnr4xzcFNZUvmFbsStW8p1qokbcLpnnMaAj5KC1MSvr+TQ5nLO9E7o8pUJONUzjr8gh+0bqL+xFg5uLuPS8DRj0wuWXD9VpMQ7pZR6Tim1QynVqJT678a2LymlvhR3zFeUUo+uOK9bKXXAeO0xz9XYw9ziEm2DUxzYlLpolZUc2lzGtfFZBie1w3IlSilO945b0hs1ObS5jFnje9Zcz1tXY/r3WNQROrTJ8NP0untUoFcWZzHn+yZZiioOpCCtQSLMlNa6UMr1XBufZTi8wAELDYHpp3lLT89dx8xChLaBSUsN8f6GUjzifoe9NgRZzGmjcbayIdpdW4LXI5ztTY+l9nZyuiemk4MWGuKG8kKqfPnaEKzC2d4Josoa/4BJcX4OO6r9rvcTaEOQxZzpHaempIDqkgLL7lGY52VHtZ8zaZJzxU5O946Tl+NhZ43fsnuICPvqS9Im542dnLLQURzPoc1lnO4Zd3WhGm0IspgzvRPsb7DOP2Cyv76Us73ufhCc4NTVcfbUlaR8IdlK9tWX0jE0pRMAruDMtQnqywqpTPFCspXsqy9jYnaRnlH3JgDUhiBLmZhdpHt42tJpIZN9DaWMzSzSO+beB8FulqKKc30TlvpnTPY1lBFV0NqvRwXxXOibZE9d6sOmV7K3PnaPc33u1b82BFmKOVVgy4jAuMdZPT2xzJWRaWYWlmxpiPbVG/rXfpplpucjXBqZTml+p0TsrPGT6xVX//61IchSWvtiaR/22vQg5Hk9nNEN0TKtRtqN3TYYguqSfKp8+dpPE8eF/kmUskf/+TkxP5mb/TTaEGQprf2T1JYWUF6cZ/m98nO87Kr1c/aauyMn7KS1b5Jcr9ActM5RbCIi7G8odXVDZDemIbZjRAaxDtfZaxOu9ZNpQ5CltPZN0lJrz0MAsTDS1r5J1z4IdnO+b5KmoN9yR7HJ3vpSOofCzCxEbLmf2zl/bZLyolxqS62LmItnb0Mp4zOLXHNpxThtCLKQucUlukJhdttoCHbV+BmbWWRoat62e7qZ1v5JW/W/p66EqIKLA3qFMcT0v6euFBFrVhSvZK8x8nDrqEwbgiykcyhMJKpsHRGY92rVKakZmpojNDVvy/y0iWl0LvZrQ7C4FKVtYMpW/bcYCyvPXXPn718bgizETkelyS6jIdK1CeCC0RjbOSKoLyvEl5/DxQGt/86hMAtLUdv8AxDLBLutqti1
IzJtCLKQ1r5JivK8bKkosu2epYW51JcVLjeC2YwZsWWnIfB4hJ01fm2IeVv/dhoCiE2Ptg26U/9ZZQhe7Rrhn1697LQYjnOhf5JdNX7LMi4moqW2RDdExEZkDeWFlBZZk3o6ES21fi72T2W9w/583yQFuR62VflsvW9LbQk9o7NMzS3aet+1kFWG4PutA/yP5y6mRTFpq1BK0dpvb8SQSUutn+5QmLnF7E510No3YetowGRXTQlT8xHXRq7Yxfm+CXbVlFhWgyMRO6tjocLtLkwJnhJDICIPikibiHSKyFOr7L9bRCZE5JTx+sxaz00lu2r8zC4u0TM2Y+VtXM218Vmm5iK2+gdMWmpjkSsdg2Hb7+0WZhYidA9PO6T/WEOUzQ5jsyNk97QQwC5D/26cHk3aEIiIF/gCseLzu4HHRGT3Kof+RCl10Hj90TrPTQk7DIvsVoeNHZjzo86MCLTDuG1gKrai1QH976zR+u8dc64jVF9WiN+lDvtUjAiOAJ1GtbEF4OvAIzacu25MQ9CexYbgQv8UIrHRkd1sqSiiKM+b1SGkTkRsmfjyc9hcUZTVHaHzRuI3O3IMrURE2GX4adxGKgxBPdAT97nX2LaSd4jIaRH5rojsWee5KaHYfBBcOEdnF639E2yrLKYoL8f2e+vIlZijsqQgh/qyQkfuv6vGzwUX9kjtorVvEo9DHSGI+WkuDrjPYZ8KQ7Cax2Xlf3kS2KKUOgD8NfCf6zg3dqDIEyJyXESOh0KhDQu7o9pPWxb3iFr7J2lxoDdq4tYHwS5a+ybZXVdi24rWlbTUlnB5eDpraxO09k+xPeCjINfryP131foJz0dcl5I9FYagF9gU97kB6Is/QCk1qZQKG++fA3JFpGot58Zd42ml1GGl1OFAILBhYXfV+Lk0PM18JPsehMm5WHEMJ+anTXbX+pmYXaR/IvuK2S9FFW0DU474Z0xaav1ElTsjV+ygY2hqOXrHCXYZfhq3dUZTYQjeBJpFZJuI5AGPAsfiDxCRGjG6QCJyxLjvyFrOTTU7avwsRRVdQ9NW3saVXHRgRetKstlh3DM6w+zikmPTEvB2Q+RGh6XVzC4scXV0huZqe9cPxGOWJXWb/pM2BEqpCPAJ4HngAvANpdR5EXlSRJ40Dvtl4JyInAY+DzyqYqx6brIy3QjzIXTrCj8raTN+fGYYmxNkc6oJsxfe7GCPdLPhsHdjCKPVdA6FUQpHRwS+/Bw2VRRywWUjgpR4DI3pnudWbPtS3Pu/Af5mredaybaqYnK9QttA9sWytw+G8efnUGNhsfqbYUauZGND1DEU+801B53rkWazw94NhhgMP5nL9J9VK4sBcr0eGgO+5d5xNtE+OEVztc8xR6XJrhq/64bGdtA+OEVdaQH+AntTS6wkWx327YNT5Hk9bK20L8fWarQYfko3rbDPOkMAsXk6tzlr7KBjKLy8lsJJdlT7uTwyw0Ik6rQottI+GHa8NwrZ67BvH5xie6CYHK+zzd4uF66wz1pD0Dcxx6QLkz9ZxUh4ntHpBZocnJYwaa72sRRVXBrOHof9UlTRFQqzw0FHpYnZGTCnqrKF9kF3dIRMP6Wb1nNkpyHIwhXG7Ubvww0Pglmnt2Moe/R/ZWSahUjUFSMCU4aOLAohDRvJ9nY6GLFlsqWymIJcj6tmJbLTENRkX84hs9F1gyHYHijGI+4aGluNmwxxRXEelcV5dGbRiMA0ek466k28HmF7lc9V+s9KQ2BWa8qmRTXtg1P4C3KoLsl3WhQKcr1srijKqhGBmxoigKagL6umhsxn3Q2GGGLTo9oQOIyIsKPal1UjAnN+1OmIIZPman92jQiGwjSUF1Kcb3+Op9VorvbRPpg9kUPtg2EKcj1ssrEq341oDvq4Nj5LeD7itChAlhoCiKXkbcuiELrOobBreqMQexAuDU+zuJQdkUMdg1Ou6Y1CrGc8NRdhaGreaVFsoX1wiqagz/ZiNIloMvxkXS4ZFWStIdhR7WNidpFQOPMfhGEjYsgNjkqT5mofkajichZEDi0uRekKhR1NbbASM3osW0Zl7YNT7Ai66/cP7oncylpDYD4Ibpqns4q350fd0xC9HTmU+fq/MjLN4pJyV0OURZFbE7OLDE7Os8MFEUMmWyqKyPWKa9qfrDcEbhmaWUmHiyJWTBoDPiRLIofcFDFkUuXLo6woNysMcYcLO0I5Xo8ROeQOQ5y1hqCmpABffo5rLLKVtA9OUVKQQ9DvfMSQSWGel03lRbS75EGwkvbBWFU4NyzmMxERmoM+OrPAELe5LGLIpKnaPZFbWWsIRITGoI/OkDu+CCvpMFIbuCViyGRHdXY0RB2DYTaVF1GY50wxlEQ0Bf20D2V+wETHYJjiPK9jVeES0Rz0cXV0xhU5h7LWEAA0BXwZPzWhlKJ9aMpVw2KTpqCf7uEwkQyPHGofdKf+m4M+xmcWGQ4vOC2KpXQMxSKG3NYRagr6UAq6XNAZzW5DEPQxNDWf0TmHhsMLjM8sLjsH3URz0MfikuLyyIzToljGQiTKpeFpV0Vsmbydcyizp+c6h8LL4Zpuwnwm3TA9nfWGANzxRVhFh0vnR+FtmdziMLOCq6PTRKLKVWs4TMwQxkz+/U/OxSKGGoPFTotyHVurivB6xBWzEikxBCLyoIi0iUiniDy1yv5fE5EzxutnInIgbt9lETkrIqdE5Hgq5FkrzdlgCMxiKC6cmjAfTjc8CFbRaZREbQy4T/9Bfz7+gpyM1r8ZFdjkQv3n53jZUlnkivYn6fXuIuIFvgDcT6wY/Zsickwp1Rp32CXg3UqpMRF5CHgaOBq3/x6l1HCysqyXTRVF5OV4XPFFWEVXKFaVzE0RQyZFeTk0lBfSnuH6h1iiPbdhRg5l8tRQVyhmiN0UsRWPW/SfihHBEaBTKdWtlFoAvg48En+AUupnSqkx4+NrQEMK7ps0sSyAxRlvCLa70FFmsqPan9HpkLtCYWpKnK9KlojmoD+jf/+dQ2FyvcJml+QYWklz0B1FmlJhCOqBnrjPvca2RHwM+G7cZwV8X0ROiMgTiU4SkSdE5LiIHA+FQkkJHE9j0F1ZAFNN19A0jS7sjZo0BX10D0+zFM3MEMau0LQr56dNmqt9DIcXGJ3OzMihzqEwWyudr0qWCLNI0+URZ1OtpEI7q3U1V32qReQeYobg9+M236mUugV4CPi4iNy12rlKqaeVUoeVUocDgUCyMi/TFPDRM+aOWN5UE56PMDA559phMUBjoJiFSJTescyLHFJK0TUUdqV/wOTtnEOZOSrrDqWL/p3tjKbCEPQCm+I+NwB9Kw8Skf3A3wOPKKVGzO1KqT7j7xDwLWJTTbbRXB2L5e0OZV7yM9NRlg4PghtiqVPN0NQ84flImug/837/C5EoV0ZnXN4RMlKtOOwnSIUheBNoFpFtIpIHPAociz9ARDYDzwAfUUq1x20vFhG/+R54ADiXApnWzLJFdoHDJtWYjaubGyJTtkycnluOWHFxQ1RXWkhhrjcj9X95JDbl6Gb9F+TGUq04rf+ko4aUUhER+QTwPOAFvqy
UOi8iTxr7vwR8BqgE/tZwWkaUUoeBauBbxrYc4F+VUt9LVqb1sK0qVjYxE5PPdYXC5HiELZXudJQBlBXlUeXLzLKJ6WCIPR5he6A4I0dk6TAihtj0qNMjspSUS1JKPQc8t2Lbl+Le/xbwW6uc1w0cWLndTvJzYmUTMzHnUNfQNJsri8h1qaPMpDHgc/xBsIKu0DTFeV5XlAe9EU1BH8cvj938wDTD7Fy42VkPsd//z7pGiEYVHocK57i7hbCJpgwNoetyuaPMxIzcyrTkZ51DYRpdHLpr0hiIlU2cWXBH2cRU0RkKU19WSFGeO8qDJqIp6GM+EuXa+KxjMmhDQOyLuDQ8nVHJzyJLUS6PTKeFIWgKxKrFjWRYCGNXKOzKFa0rMefQMy1goisUduVCvpU0mhkOHJyV0IaA2IOwuKToGXPOIqeanrFZFpeUq9cQmGRizqfwfIT+ibnlh9zNZGLkVjSq6BqadrWj2MTsrDnpp9SGAJYby0xqiJYdZenwIGSgIbgUMnMMud8Qb6kswiOZpf++iVlmF5fSwhBUFOdRUZznqCHWhgDYHsi8HlFnGkSsmNSVFlCU580o/adDxJBJLPlZZkUOdYXcm+xvNRoDxXQNOTc1pw0BUFqYS8Cfn1EhpF1DYQL+fEoL3ZnjJh4RoTGQWak+OofCeD3Clkr3jwiAjNQ/uHsNRzyxyDk9InCcxgyLpY5FDKVHIwQx/WeSs7IrFGaLkd02HWgMFmdUwETnUJjSwlwqi/OcFmVNNAV9jEwvMOZQwER6/EptoCkYi2XPhBBGpVQs2VmaDIshpv9r47NMz2dGCGMsYiWN9B/IrIAJsyPk9tBdk0aHp6e1ITBozKAQxpHpBSZmF9PKEJiyZsKoILIU5fLwjOsXMsWTaZFb3aFw2kwLgTYErsENIVypIp0ihkwyKYSxd2yWhaVoehniDDIE4zMLDIcX0kr/9eWF5DtYJEsbAoPGDMrC2JVGoYsmWyqL8XokIxoi05ilU4+0pCCXoD8/IwxxukUMQaxI1rYq53IOaUNgUFtSkDFZGLtCYQpyPdSVFjotyprJy/GwpcL5LIypYDnHTVX6NEQQM1yZoP90NMQQ64zqqSGHyaQsjJ1DYbZX+RxLYLVRnHwQUklXKEyVL5/SIveH7sbTFPTRlQE5n7pCYfK8HhrK06cjBEaRrFFnimRpQxCH07G8qaIrFE4r/4BJY8DH5ZH0D2GMRWylz7ScSWPAx9R8hNDUvNOiJEXX0DRbq4pcW54yEY1BH1GFI2Ur00tTFmNmYZxdSN+ylbMLS1wbn03LhsjM+XR1NH3LViqllrOOphuZEjnk9vKUiTCfWSdWGGtDEEdjsBil4NJw+jqMLw1Po1T6zY9CZjREo0bobjpkHV1JkwuyYCaLWZ4yHQ3B9qpY2UonZiVSYghE5EERaRORThF5apX9IiKfN/afEZFb1nqunWRCCGM65bhZiZkyOJ0bos40DN01Cfrz8eXnpHUI9dXRWHnKdFrDYVKY56W+rNCRjlDShkBEvMAXgIeA3cBjIrJ7xWEPAc3G6wngi+s41za2VhY7ZpFTRVcojEisBGe6UVKQS3VJvqPJt5IlHUN3TUQkViQojX//nUPpFzoaj1N+ylSMCI4AnUqpbqXUAvB14JEVxx6dW0MAACAASURBVDwC/KOK8RpQJiK1azzXNsxC0um8lqArNE1DeSEFuV6nRdkQjYH0boi6QmEKc71pFbobT1OaJ58zG9F0Su8RT2PAR3dommjU3sitVBiCeqAn7nOvsW0tx6zlXABE5AkROS4ix0OhUNJCJ6IxUJzeD8JQejrKTJqCPrrTOISxKxRmW1Vx2oXumjQGixmcnGdqbtFpUTZEVyhMTUkBvnx3l6dMRFPQx+ziEn0T9uZ8SoUhWO0Xv/IpTnTMWs6NbVTqaaXUYaXU4UAgsE4R107MIodtt8ipIBpVdA/H1hCkK03BWAjjUJqGMHalWY6blTQF0nuFfVdoOi39AybLkUM26z8VhqAX2BT3uQHoW+MxaznXVhpdUEh6o1wbn2VuMZrWDZE5mknHUdnc4hK9Y7NpPyKD9NS/UoruNB8RL6e6sVn/qTAEbwLNIrJNRPKAR4FjK445BnzUiB66HZhQSvWv8VxbcToLYDK8HTGUvj2idI7c6g7FQnfTuUe6uaKIXK+kpf5DU/NMzUfS2hBUFudRVpRru58s6Yk0pVRERD4BPA94gS8rpc6LyJPG/i8BzwEPA53ADPBfb3RusjIlQ/zQ7O6dTkqyftKtKtNqmCGM6dgjTdccN/HkeD1srUxPP1k6lWdNhFmtz+4RQUo8Kkqp54g19vHbvhT3XgEfX+u5TlLpy6e8KDcte0RdoWnKinKpSJOqTKthhjCmp/5jobtb06Q8ZSKagj7aBqacFmPdLIfupvGIDGKd0R9etC4gZjX0yuJVcMIip4IuY2l9ulRlSkS6hjB2DoXZVF6UtqG7Jo0BH1dGZ1iIpFfOp66hMEV5XmpKCpwWJSmagj6Gw/NMzNgXuaUNwSqka/K57lA4LVMbrMQMYZxMsxDGdE02t5KmoI+lqOKKA8nPkiFTOkLLARM2tkHaEKxCY7CY4fAC4zPpU7ZyuSpTmg+L4e0QxnQqWxmNqrRNdraSdI0c6s4QQ+xEwIo2BKvQmIax1OmcY2gl6Vg28dr4LPOR9A7dNdm+HDCRPvqfWYgYWXfTX/+bKorI83psnZ7WhmAV0jGE1MzPkwkN0RYjhDGdDMGyIc4A/Rfl5TiW/GyjdC87itNf/2+XrdSGwFEaygtjFjmNDEHnclWmIqdFSRozhDGt9D+UOSMyIO2Sz2XSiBhi09N2zkhoQ7AKOV4PW6uK0ioLZtdQLMeNN01z3Kwk3SK3ukLTlKd56G48jYFiuobsT362UbpC03gEtlSmf0cIYn6yKyPTzEfsKZKlDUECmoKxnEPpQrrnuFlJUzC9Qhi7MsRRbGImP+ufnHNalDXRFQqzqSL9Q3dNzLKVV0bsqdanDUEC0imWem5xiaujMxkRMWHSGCxOqxDG7kwzxGmW8ynds+6uZNlPaZP+tSFIQGMgfWKpr4zMEFWZ4SgzaQr4gfRoiJZDdzOpIXIo+dlGWIoqLg1nRuioyXK1Pm0InCWdIocyzVEG6RXC+HbEUOY0RE4lP9sIfUbobib9/s3ILbt+/9oQJGC7Q3nBN4LZa9ueQT2i4vwc6koL0mJE0JXm5RFXQ0TSJtVHZwaF7sazPWBf5JA2BAkozs+htrQgLYbGnaEw9WWFFOWlZ1WmRMSSz6WBIQ6FycvJjNDdeMwiTW6nK8NCd02ajOSLdkRuaUNwA9Il51BXKJxxvSF4W/9uD2HsHAqzPYNCd01iyc/cn2ol00J3TRoDPmYWlhiwIXJLG4Ib0GgMzdxcPzcaVXQNZZajzKQpGHsQ3B7CmGmhoybpknMoU/Vvp59SG4Ib0BT0EXZ5/dz+yTlmF5cy8kFoSoPIlflI5oXumqRLwESmJPtbiZ2GOClDICIVIvKCiHQYf8tXOWaTiPxIRC6IyHkR+WTcvs+KyDUROW
W8Hk5GnlRjdyzvRujKgKpkiUiH+sWZGLprUl9eSH6Ox9X6z6Ssuyup8uVRUpCTFiOCp4AXlVLNwIvG55VEgN9VSrUAtwMfF5Hdcfv/Uil10Hi5plIZxGXBdHGPKBNDR02qfHmUFrq7WlymOiohlvxsu8sjh5arkmWg/per9dmQ6iZZQ/AI8FXj/VeBD6w8QCnVr5Q6abyfAi4A9Une1xbM+rluHhF0DoUpKcihypdZjjIw67e6u35uZwaG7sbTaGMI40bI5I4QxP4vOzqiyRqCaqVUP8QafCB4o4NFZCtwCHg9bvMnROSMiHx5tamluHOfEJHjInI8FLKnnqfZELn9QWgKpn9VpkSYIXRupStDQ3dNmoI+esZmmFu0J/nZeulazrpb6LQoltAU9BGammdi1tpqfTc1BCLyAxE5t8rrkfXcSER8wH8An1JKTRqbvwg0AgeBfuDPE52vlHpaKXVYKXU4EAis59ZJ4fYQ0lh5xMzsDYH7Qxi7QtMZOxqAmP6Vcm+1uK6habZWFZHjzcy4F/PZtno9x021p5S6Tym1d5XXs8CgiNQCGH+HVruGiOQSMwL/opR6Ju7ag0qpJaVUFPg74Egq/qlU0hj00T8xR3g+4rQo1zExu0hoaj4jHZUmbo5cUUplXNbXlbhZ/xCTa3tVJuvfnpxDyZrRY8DjxvvHgWdXHiCxOYt/AC4opf5ixb7auI8fBM4lKU/KMb+ISy7sEZkPZyYUrE/E2yGk7tN//8QcMwuZGbprsq2qGI+4M3JrbnGJKyPTNFdnrv43G9X6rJ6eTtYQfA64X0Q6gPuNz4hInYiYEUB3Ah8B3rNKmOifishZETkD3AP8TpLypBw394iWI1YyuEfaUF5EXo7HlZFbmVaVbDUKcr1sqihypf67Q9NEFTRX+50WxTLsqtaXlIdLKTUC3LvK9j7gYeP9T4FVPZlKqY8kc3872FIZSx3gRkPQGQqT6xU2ZaijDIwQxip3Rg61D04BsCODe6Tg3mpxHUPZo/9243+1isz0sKSQvBwPWyqKXNkQdQ7G5kcz1VFm0ujSyKGOwTCVxXlU+vKdFsVSmoI+uoenWXJZzqf2wanlQu+ZTFPQx5URa4tkZXYLkiKagr7l3p+baBucyuj5UZPGgI+eUfeFMLYPZYf+mwI+FiJResfsKZu4VjoGw2ytLCI/JzPKUybCrNZ3ddQ6P4E2BGtgZ42fyyMzthWSXgvT8xF6x2bZmcHzoyZNRv3WS8PucRgrpegcDLMjC/Rvpm9w26isYyhMczAL9L+cakUbAkdprvazFFWuiqU2p6oy2VFm0ujCamX9E3NMzUeyQv9uLBtqRgxlun8A7AlY0YZgDZi9bjdND7UZsuysyfyGqDHgQ1wWwrjsKM7giC2T0qJcqnz5rtJ/Vyic8RFDJnYUydKGYA1sqyomxyO0DbjHEHQMTpGf42FzRWZVxVqNglwvDeWFrkr10TEYeyizYWoI3JdzyDRK2aL/pqCPDm0InCUvx8O2qmLaB93TI2objK1ozbSqWIlodFkWzLbBKap8+ZRnWFWsRDQFY/p3S5GmbIkYMmkO+ukYmrKsWp82BGtkR43fVVNDHYNTWdMbgljkSnco7JoQxpj+M39ayKQp6GNidpHhsDtyPrUbEUN5OdnRhO2s8TG3GKXHosit7NBiCtgR9NMzNsPMgvM5hyZmF+mfmMuK0EWTxqCP+UiUvvFZp0UhGlV0DGVHxJCJ21bYZ1tHqHnZT2mN/rUhWCM7a2JZGN0wPdFprDLMhtBREzfVz702PsvMwlJWGWI36X9uMVYeNBscxSbNhv6tmpXQhmCNWG2R10N7ljkq4e3Eem7okb6d2iB79F9bWkBRntcVhsCMGMqmqTl/QS71ZYXaEDjNlorYfKQb/ARtA1MU5XmpL8vcHEMrKS/Oo7I4zxX6XzbEWbCYySRWpMkdqT6yLWLLZEe1z7LIRW0I1kiO10NTwLovYj10DE3RHPThyZKIIZNdtX4uukD/7YNTBP35lBblOi2KrZiRQ07TPjhFjkfYWpkdEUMmO6r9dIemiSylPueQNgTrYEe1jw4X9EjbBsJZNT9qsqumhLaBKccjhzoGw1mxkG8lO6r99E/MMTFjbdnEm9ExFGZrVXHWRAyZ7Kj2s7AU5fJI6iOHskuTSbKjxk/fxByTc849CGPTCwyH57PKUWzSUlvCfCTK5RHnFjZFo4rOLMlxs5KW2tj/fHFg8iZHWku2he6amFNhVpStTMoQiEiFiLwgIh3G31WLz4vIZaMAzSkROb7e892COSfc4aDD2Jwjz6aIFZNdRi/8Qr9zDVHv2Cyzi0tZ2RDtri0BnNX/zEKEK6MzWecfgNjU6KnP3M8De2pSfu1kRwRPAS8qpZqBF43PibhHKXVQKXV4g+c7jjkd4KTDsj2LcgytxFxJfbHfOf23LRvi7NN/wJ9PRXEeFxzU/8WBKZR62yhlE7leD2VF1qxkT9YQPAJ81Xj/VeADNp9vK/VlhRTleR02BGH8+TnUlBQ4JoNTFOR6aQwUOzo10do3icjbo5NsQkRoqfVzwWH9A+yuyz5DYCXJGoJqpVQ/gPE3mOA4BXxfRE6IyBMbON8VeDxCc9DZyKEL/ZPsrPEjkl0RQya7akoc7ZG29k+wrbKY4vykqrymLS2Gw96KyJW1cKF/kpKCnKwKnbaDmxoCEfmBiJxb5fXIOu5zp1LqFuAh4OMictd6BRWRJ0TkuIgcD4VC6z09ZbTUltDaP+lI8q1oVHGhf5I9Wdwb2lXr59r4LBOzzjjsW/snaclq/TvrsG/tn6SltiRrO0JWcVNDoJS6Tym1d5XXs8CgiNQCGH+HElyjz/g7BHwLOGLsWtP5xrlPK6UOK6UOBwKB9fyPKWVPXQnjM4v0TczZfu/LI9NMLyyxp67U9nu7hRZjbtiJUdnE7CI9o7NZOT9tYkYOOTEqW4oq2gamln8DmtSR7NTQMeBx4/3jwLMrDxCRYhHxm++BB4Bzaz3fbew2GuHz1yZsv3drv54fbamJ/e9O+Akuav3TFPSR4xFHIoeujEwzs7CU1fq3imQNweeA+0WkA7jf+IyI1InIc8Yx1cBPReQ08AbwHaXU9250vptpqfUjAuf77H8QzvdNkuORrAwdNakuyaesKNeRhsg0xHuyuEean+OlKehzVP/ZPCKziqQ8XkqpEeDeVbb3AQ8b77uBA+s5380U5eWwvarYMUPQXO0nP8dr+73dgojQ4pDDuLVvkipfHgF/vu33dhO7avy81j1q+30v9OuOkFXolcUbYE9dKa19DkwN9U3q3hAxh3HbgHXVmhKhHZUxWmpLGJicY2za3iI1rX2TNAV9Wd0RsgptCDbAnroS+ibsfRCGJucYDs9ndcSQSUtNCbOLS1wZtaZa02osRKJ0DIb1/DRvO+ztXk9woV87iq1CG4INYEbt2Dk9dN6cn9YNEbvMnDc2zlN3DoVZWIpmdcSWybIhsHF6bnR6gYHJOT0itghtCDaA2Rift3F6yFxRmc0x7CY7qv14BC7YGEKqHZVvE/DnU+XLs9VhbN5Lj
wisQRuCDVBenEddacFy42AH5/sm2FxRRElBduXAX42CXC/bAz5b/TStfZMU5HrYVpVdOfAT0VJbYqshWO4I1WZfag870IZgg+yuK7V1akg7in+effWlnLVxLUdr/wS7akrwZlkxoES01JbQMRhmIWJPqonW/kmqS/Kp9GV3xJZVaEOwQfbUldAdCjO7sGT5vabmFrk8MqP9A3HsrS9lcHKeoUnrV3grpWKGWOt/mb31pSwsRW1LwHihX3eErEQbgg2yu66EqLIncsJ0yu2p1w+Cyf6GmNPWjlHBtfFZJuciuiGKY3+9ffqfW1yicyis/QMWog3BBnnbYWy9ITDnwnfX6ogVk921JYjAmV7rG6LzfdpRuZItlUWUFOTYov/W/kkiUcX+hjLL75WtaEOwQerLCiktzLXFYXm+b5LK4jyqS/T8qElxfg5NAR/nbOiRnukdx+sRPTUXh4iwv6GMs9fGLb/XmZ7YPQ5s0h0hq9CGYIOICHvrSzh3zfoRwZneCfbUl2b9itaV2OUwPt0zwa4aPwW5ekVrPPsaSmkbmGJu0Vo/2eneCQL+/KwsxmQX2hAkwf6GMi70T1r6IITnI7QPTXFwkx4Wr2RfQylDU/MMWJgSPBpVnO4d54DW/3UcaChlcUlZnhL8dO84BxrKdEfIQrQhSIKDm8qIRJWlC8vO9k6gFBzarBuilZiN86ke66YnLo1MMzUX0YZ4FfYZc/Zneq3T/+TcIt2haQ406GkhK9GGIAkOGY3DW1etexDMRu6gdpRdx566EvK8Ht66OmbZPU6b+teG4DrqSguo8uXzloWG+JzhjN6v9W8p2hAkQbCkgPqyQksfhFM9Y2ytLKK8OM+ye6Qr+TledteVWGqIT/eMU5znpTGgUx+vRES4ZXOZtR0hY7RhhqtqrEEbgiQ5uLmMUxaPCHRvNDG3bC7nzLVxFi0qpn6qd4J9DaV6RXECbtlSzqXhaUbC85Zc/+SVMbZXFeuOkMUkZQhEpEJEXhCRDuNv+SrH7BSRU3GvSRH5lLHvsyJyLW7fw8nI4wSHNpVxbXzWkhWu/ROzDE7Oa0NwAw5tLmNuMcpFCzJhzi4s0do3waHN1/2sNQa3GLqxYlSglOLElTFu3aL1bzXJjgieAl5USjUDLxqffw6lVJtS6qBS6iBwKzBDrIC9yV+a+5VSz6083+2YjfRJCx6EE1dic9+6IUrMLUYjcdICP8Hp3nEWlxS3bdX6T8T+hlJyPMIJC/TfFZpmbGaRw1r/lpOsIXgE+Krx/qvAB25y/L1Al1LqSpL3dQ1760vJy/Fw/HLqS/e9cWmUojyvXsh0A+pKCwj68y0xBG9ein2nt26uSPm1M4WC3Njv8+SV1Ov/xBVD/1u0/q0mWUNQrZTqBzD+Bm9y/KPA11Zs+4SInBGRL682tWQiIk+IyHEROR4KhZKTOoUU5Ho5uKmMNywyBLduKSfHq105iRARbttawRuXRlEqtaUr37wyxs5qP6VFOvX3jTi0udwYPaXWT3P88hjlRbk0BnTqb6u5aQsjIj8QkXOrvB5Zz41EJA94P/DNuM1fBBqBg0A/8OeJzldKPa2UOqyUOhwIBNZza8s5uq2Cc9cmCM9HUnbNiZlF2ganuG2r7g3djKPbK+ifmKNndDZl11yKKk5eGdPTEmvgyLYK5hajKc87ZPoH9EIy67mpIVBK3aeU2rvK61lgUERqAYy/Qze41EPASaXUYNy1B5VSS0qpKPB3wJHk/h1nOLKtgqgipcPj41dGUQptCNbA0W2VALx2aSRl17w4MEl4PqL1vwaObovp6LXu1Ol/ODxP9/C0nhayiWTnHI4BjxvvHweevcGxj7FiWsg0IgYfBM4lKY8j3LK5HK9HeONS6qaH3rg8Sq5X9IriNdAc9FFRnMfr3SnUv/Fd6hHBzan05bOz2p9SQ/Czrti13tFYmbJrahKTrCH4HHC/iHQA9xufEZE6EVmOABKRImP/MyvO/1MROSsiZ4B7gN9JUh5HKM7PYW9dCa+nsEf6evco+xvKdKKzNeDxCEe2VqRU/690DrOlsoiG8qKUXTOTuX17Bccvj6WsYtmrXcP4C2LPlcZ6kjIESqkRpdS9Sqlm4++osb1PKfVw3HEzSqlKpdTEivM/opTap5Tar5R6v+l4Tkdub6zkVM840ynwE0zMLHKmd5w7m6pSIFl2cHR7Bb1js1wbT95PsLgU5bXuUa3/dfCOxkpmF5dSlpb6lc4Rjm6r1IESNqG1nCLuag6wuKR4tSv5XunPuoaJKrirWTdEa+WOxpiuftqRfETZmd5xwvMR3qkNwZo5YvhpUvH77xmd4eroDHc26Wkhu9CGIEUc3lpOYa6Xn6SgIXq5Yxhffo5OfbwOdlT7qCkp4KX25PX/044RROAOPT+9ZiqK89hTV8LL7cNJX8s0JnpEZh/aEKSI/Bwvt2+v4OWO5B4EpRQ/6QjxjsZKcvWweM2ICHfvDPCTjmEiScazv9I5zL76UsqKdH6b9XDPziAnro4xMbOY1HVe6ggR8OfTHNSJ/uxCtzQp5F3NAS4NT9MzOrPha1wZmaF3bFZPC22Ad+8IMDUXSSob7MTMIievjulpoQ1wz64gS1HFy0mMihciUV5uC/GenUG9fsBGtCFIIe/eGVvo9qO2Gy2nuDE/uBBbZnHXDnctmksH7miqwusRXmrbeEP0o7YhIlHF/burUyhZdnBwUxllRblJ/f7fvDzK1HyEe1tulqRAk0q0IUgh26uK2R4o5nvnBjZ8jefPD7Crxs+WSr2sfr2UFuZy6+byZWO6Eb7fOkDQn88BXQho3Xg9wrt3BHipLUQ0urF0Hy+0DpKf4+GdekRsK9oQpBAR4aG9Nbx+aZTR6YV1nz80NcfxK2M8tLf25gdrVuWhfTVcHJiicyi87nPnFpd4qS3Efbur8ej6Axvi3pZqRqYXOL6BVfZKKV68OMidTVUU5eVYIJ0mEdoQpJiH9tayFFW80Lr+UcELrYMoBQ/urbFAsuzgob21iMBzZ9e/JOXVrhGmF5Z4QE8LbZh7dwUpyPVw7PS1dZ97vm+SntFZ7mvR+rcbbQhSzJ66EjZVFPLdDUwPfffsANuqitlRraMlNkpNaQG3bangO2fWbwiePXUNf0GOTmuQBMX5OdzXUs1zZwfWnY30W29dI9crPLxPd4TsRhuCFCMivHdfHT/tGF5X1bJr47O80jXM+/bX6miJJHnv/lraBqdoH1x71bLJuUW+e26A9x+oIz9Hp/VIhvcfqGN0eoFXOtceSh1ZivLsqT7esyuow3YdQBsCC/jwbZuIRBXfPNG75nO+8WYPAL962yarxMoa3ru/llyv8LU3rq75nG+f7mc+EuVXD2v9J8u7dwYoKcjhmZNrnx76Secww+F5PniowULJNInQhsACtlUVc0djJV974+qaoieWoopvHu/hXc0BneQsBVT58nl4Xy3/fqJ3zbmfvnmihx3VPvY3lFosXeaTn+Pll25t4Lmz/QyucVT8jTd7KCvK5Z5dOmzaCbQhsIjHjmymd2yWl9awuOaHF4fom5jjMT0aSBkf
fccWpuYi/Oepm/dKT1wZ462r43z4ts16Wi5F/MYdW1lSin9+7eZVabtDYb53foBfO7pZT8s5hDYEFvELe2qoKSng8y923LCEolKKv/pBO5sqCrlPR6ukjFs2l7O7toR/+Omlm6ac+OsfdlBRnMdjR7QhThVbKou5d1c1//L6VWYXlm547NMvd5Pn9fAbd2yzSTrNSrQhsIi8HA+fuq+Zt66O80Jr4gVO3zs3wPm+ST517w6dWyiFiAifvK+Z7tA0XzP8L6txumecH7eF+K13bdOx6ynmf3/3dkanF/jSS10Jj+kZneGZk9f41cObCPjzbZROE49ueSzkl29tYHtVMX/yvYvMLV7fK5qej/Cnz7fRGCjmA4fqHZAws3lgdzVHt1XwVy+0Mzl3fSK0hUiU//s/z1FelMtHbt/igISZzW1bK/jF/bV88aUuro5cn39LKcUfPHOWXK/w5N2NDkioMUnKEIjIr4jIeRGJisjhGxz3oIi0iUiniDwVt71CRF4QkQ7jb0bVBczxevjM+3bTFZrm098693NTREop/q9vneXKyDR//MhevHola8oREf6fX9zN+Owin/r6qeumiP7qB+2cvTbB//zQfvwFuQ5Jmdl8+r0t5HiE3/3mqeumiP759av8tHOYpx5uob6s0CEJNZD8iOAc8CHg5UQHiIgX+AKx4vW7gcdEZLex+yngRaVUM/Ci8TmjuHtnkE/e28x/nOzls8fOE56PMD0f4dP/eY5nT/XxO/ft4A6d6dIy9taX8keP7OGHF4f4vW+eZiQ8z+JSlM999yJ/++MuPnx4k17JbSG1pYV87pf2c/zKGE/803H6J2ZZiET5+59085lnz/Gu5ip+7chmp8XMepKaFFVKXQBuFmlxBOhUSnUbx34deARoNf7ebRz3VeDHwO8nI5Mb+eS9zUzMLvKVn13ma2/0oFBEoor/7V3b+Pg9TU6Ll/H82tEthKbm+fyLHTx3doCoiun/vxzdzB++b/fNL6BJivcfqGN2IcIfPHOWOz/3Q/JyPMwtRnlwTw1/9ehBndfJBdjhHasH4r11vcBR4321WadYKdUvIglzz4rIE8ATAJs3p1cPwuMRPvv+PbzvQB3fPduP1ys8sLuaW7dUOC1a1vCp+3bwi/vr+NfXr1KQ6+G2rRXcs0unOraLD9+2mTsaq/jG8R7C8xFu317JfS3VekrUJdzUEIjID4DVxs6fVko9u4Z7rPZNrztHrVLqaeBpgMOHD28sx63D3LqlnFu3ZJQbJK1oCvr4jB4BOMamiiJ+94GdTouhWYWbGgKl1H1J3qMXiA/QbgD6jPeDIlJrjAZqgY1XtNBoNBrNhrAjfPRNoFlEtolIHvAocMzYdwx43Hj/OLCWEYZGo9FoUkiy4aMfFJFe4B3Ad0TkeWN7nYg8B6CUigCfAJ4HLgDfUEqdNy7xOeB+EekA7jc+azQajcZG5EbpD9zK4cOH1fHjx50WQ6PRaNIKETmhlLpuzZdeWazRaDRZjjYEGo1Gk+VoQ6DRaDRZjjYEGo1Gk+WkpbNYRELAzSterE4VsPZiqvbhVrnAvbJpudaHW+UC98qWaXJtUUpdVwYuLQ1BMojI8dW85k7jVrnAvbJpudaHW+UC98qWLXLpqSGNRqPJcrQh0Gg0miwnGw3B004LkAC3ygXulU3LtT7cKhe4V7askCvrfAQajUaj+XmycUSg0Wg0mji0IdBoNJosJyMNgYj8ioicF5GoiBxese8PRKRTRNpE5BcSnF8hIi+ISIfxN+XVZETk30TklPG6LCKnEhx3WUTOGsfZkmlPRD4rItfi5Hs4wXEPGnrsFBHL602LyJ+JyEUROSMi3xKRsgTH2aKzm/3/EuPzxv4zInKLVbLE3XOTiPxIRC4Yz8AnVznmbhGZiPt+P2O1XMZ9b/i9OKEv474743Rx24NICAAABE5JREFUSkQmReRTK46xRWci8mURGRKRc3Hb1tQeJfU8KqUy7gW0ADuJ1UA+HLd9N3AayAe2AV2Ad5Xz/xR4ynj/FPAnFsv758BnEuy7DFTZrL/PAr93k2O8hv62A3mGXndbLNcDQI7x/k8SfS926Gwt/z/wMPBdYlX6bgdet+G7qwVuMd77gfZV5Lob+Ladv6m1fC9O6CvB9zpAbOGV7ToD7gJuAc7Fbbtpe5Ts85iRIwKl1AWlVNsqux4Bvq6UmldKXQI6gSMJjvuq8f6rwAeskTTWCwJ+FfiaVfewiCNAp1KqWym1AHydmN4sQyn1fRWrbwHwGrFqd06xlv//EeAfVYzXgDKjEp9lKKX6lVInjfdTxGqA1Ft5zxRiu75W4V6gSym10cwFSaGUehkYXbF5Le1RUs9jRhqCG1AP9MR97mX1h6RaKdUPsQcLsLLK+buAQaVUR4L9Cvi+iJwQkScslGMlnzCG519OMBRdqy6t4jeJ9R5Xww6dreX/d1RHIrIVOAS8vsrud4jIaRH5rojssUmkm30vTv+mIFZBMVGnzAmdwdrao6R0d9OaxW5FRH4A1Kyy69NKqUQlL2WVbZbFz65Rxse48WjgTqVUn4gEgRdE5KLRa7BMNuCLwB8T080fE5u6+s2Vl1jl3KR1uRadicingQjwLwkuY4nOVoq6yraV/7+tv7efu7GID/gP4FNKqckVu08Sm/oIG/6f/wSabRDrZt+LY/oCkFgp3fcDf7DKbqd0tlaS0l3aGgKl1H0bOK0X2BT3uQHoW+W4QRGpVUr1G0PTIStkFJEc4EPArTe4Rp/xd0hEvkVsCJh0o7ZW/YnI3wHfXmXXWnWZUrlE5HHgF4F7lTE5uso1LNHZCtby/1uio5shIrnEjMC/KKWeWbk/3jAopZ4Tkb8VkSqllKXJ1dbwvTiirzgeAk4qpQZX7nBKZwZraY+S0l22TQ0dAx4VkXwR2UbMor+R4LjHjfePA4lGGMlyH3BRKdW72k4RKRYRv/memLP03GrHppIV87IfTHDPN4FmEdlm9KQeJaY3K+V6EPh94P1KqZkEx9ils7X8/8eAjxrRMLcDE+YQ3yoMn9M/ABeUUn+R4Jga4zhE5AixdmDEYrnW8r3Yrq8VJBydO6GzONbSHiX3PFrtBXfiRazx6gXmgUHg+bh9nybmXW8DHorb/vcYEUZAJfAi0GH8rbBIzq8AT67YVgc8Z7zfTsz7fxo4T2x6xA79/RNwFjhj/JhqV8pmfH6YWFRKlx2yEXPu9wCnjNeXnNTZav8/8KT5nRIbrn/B2H+WuAg2C2V6J7EpgTNxenp4hVyfMHRzmpjT/Q4b5Fr1e3FaX3HyFRFr2EvjttmuM2KGqB9YNNqwjyVqj1L5POoUExqNRpPlZNvUkEaj0WhWoA2BRqPRZDnaEGg0Gk2Wow2BRqPRZDnaEGg0Gk2Wow2BRqPRZDnaEGg0Gk2W8/8DSqD8M+WcWt0AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "y = x.sin()\n", "pl.plot(x.numpy(), y.numpy())" @@ -647,9 +1354,32 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 75, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 75, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deZBc9Xnu8e8zm/Z9RzsgC4RZLMbCQGIbGwgQJzKpJBfi65AYX4WUqcS5yS2T8i1fV7luynEqScUJgcgJZZzYJtexiRVbNmDZjuNgMCMihIQkNCMEGjTSjPYZbaOZee8ffYSbpmdTL6d7+vlUdfU55/c73e+cXp45S5+jiMDMzGpXXdoFmJlZuhwEZmY1zkFgZlbjHARmZjXOQWBmVuMa0i7gQsyePTuWLVuWdhlmZlVl8+bNhyJiTu70qgyCZcuW0dLSknYZZmZVRdKr+aZ705CZWY1zEJiZ1TgHgZlZjXMQmJnVOAeBmVmNK0oQSHpEUqekbYO0S9LnJbVK2ippdVbbbZJ2JW0PFKMeMzMbuWKtEXwRuG2I9tuBFcltHfAQgKR64MGkfRVwt6RVRarJzMxGoCi/I4iIH0laNkSXtcCXInPO62ckTZe0AFgGtEbEHgBJjyV9XypGXWY2cr19A5w820fP2T66z/RxsreP3r4BzvUP0Ncf9A0M0Nsf9CXj5wYy9/0DQQDnT2kfAUEk928eJ+mXr60iT4hfgafpv3P1IpbPnlTUxyzXD8oWAvuyxtuTafmmX5fvASStI7M2wZIlS0pTpdkYdeZcP62dPew60M2+o6c4eOIMB46f4cCJs3R1n+HEmcyXvr2VlHYFb7Z66YyqDYJ8izKGmP7WiRHrgfUAzc3NlRfTZhXk2KleftJ2mKfbDvPMnsO0dfUwkPWpmT25iXlTx3PRtPFcs3gaUyc0MmVcA5PGNTB5XANTxmeGm+rraKivo7FeNNTV0dSQuW+oF431dTTUifo6IQTKfGkKkJTcg9AbX6bZ42/pV2nfuDWkXEHQDizOGl8E7AeaBpluZqN0rn+AH+zs5Gub2/nBzk76BoKJTfW8c9lM7rhyAZfNn8KKeVNYMnMiTQ0+YNB+plxBsAG4P9kHcB1wPCI6JHUBKyQtB14H7gJ+o0w1mY0JZ8718+VnX+Pv/r2Nzu6zzJ48jnt/bjm3XjGfqxZNo7HeX/o2tKIEgaSvAu8FZktqB/4P0AgQEQ8DG4E7gFbgFPDbSVufpPuBJ4B64JGI2F6MmsxqwQ92dvKpDdvYd+Q01188iz+580res3KOv/xtVIp11NDdw7QH8LFB2jaSCQozG6Ez5/r5zLde4svPvsalcyfz5Y9ex42Xzk67LKtSVXkaarNadqjnLPd+8TleaD/O77z7Yv7w1pXe5m8FcRCYVZH9x05z9xee4eCJM6z/8LXcesX8tEuyMcBBYFYlDvWc5b//w7Mc6enlK//jXaxeMiPtkmyMcBCYVYGzff189NEW9h87zT/ee51DwIrKQWBWBT7zrZfYsu8YD31oNe9cNjPtcmyM8R4mswr33W0d/NMzr/E777mY269ckHY5NgY5CMwq2NGTvfzvf93O2xdO5Y9uXZl2OTZGedOQWQX7vxt3cOxUL1/6yBr/SMxKxu8sswq17fXj/Mvmdu79+eWsumhq2uXYGOYgMKtAEcGfbNzBzElNfOymS9Mux8Y4B4FZBfpx6yGebjvM773vUqaOb0y7HBvjHARmFehvf9DG/Knj+Y3rlqZditUAB4FZhdmy7xg/2XOYe39uuc8hZGXhd5lZhfm7f29j6vgG7r7Ol2S18nAQmFWQgyfO8ORLB7n7uiVMHueju608HARmFeRrLfvoHwjufqfXBqx8ihIEkm6TtEtSq6QH8rT/L0lbkts2Sf2SZiZteyW9mLS1FKMes2o0MBB89af7uPHSWSybPSntcqyGFBwEkuqBB4HbgVXA3ZJWZfeJiD+LiGsi4hrgj4F/j4gjWV1uStqbC63HrFr9uPUQrx87zd1rvDZg5VWMNYI1QGtE7ImIXuAxYO0Q/e8GvlqE5zUbUza8sJ8p4xu4ZdW8tEuxGlOMIFgI7Msab0+mvYWkicBtwNezJgfwpKTNktYN9iSS1klqkdTS1dVVhLLNKsfZvn6e2H6AX7hiPuMa6tMux2pMMYJAeabFIH1/CfjPnM1CN0bEajKblj4m6d35ZoyI9RHRHBHNc+bMKaxiswrzo5cP0X2mjw9c5dNMW/kVIwjagcVZ44uA/YP0vYuczUIRsT+57wQeJ7Opyaym/NsL+5kxsZEbL52ddilWg4oRBM8BKyQtl9RE5st+Q24nSdOA9wDfzJo2SdKU88PArcC2ItRkVjV6+wbYtOMgv3DFfJ9q2lJR8C9WIqJP0v3AE0A98EhEbJd0X9L+cNL1TuDJiDiZNfs84HFJ52v5SkR8t9CazKrJT185wsnefm6+3DuJLR1F+eliRGwENuZMezhn/IvAF3Om7QGuLkYNZtVq086DjGuo82YhS43XQ81SFBF8f2cnN1wyiwlNPlrI0uEgMEvRnkMnefXwKd532dy0S7Ea5iAwS9EPd2V+E3OTg8BS5CAwS9HTrYdYPnsSi2ZMTLsUq2EOArOU9PUP8OwrR7j+kllpl2I1zkFglpKtrx+n52wfN17io4UsXQ4Cs5T8pO0wAO+6eGbKlVitcxCYpeTptkNcNn8KsyaPS7sUq3EOArMU9PYN0LL3KDd4s5BVAAeBWQq27z/O2b4B3rlsRtqlmDkIzNKw+dWjAFy71EFg6XMQmKWgZe9RFs+cwNyp49MuxcxBYFZuEUHLq0dpXuqjhawyOAjMyuy1I6c41HPWm4WsYjgIzMqsZW9m/0CzdxRbhXAQmJXZln3HmDyugRVzp6RdihlQpCCQdJukXZJaJT2Qp/29ko5L2pLcPjXSec3Gmq3tx3j7wqnU1yntUsyAIlyhTFI98CBwC5kL2T8naUNEvJTT9T8i4gMXOK/ZmNDbN8COjm5++8ZlaZdi9oZirBGsAVojYk9E9AKPAWvLMK9
Z1dl1oJve/gGuXDQt7VLM3lCMIFgI7Msab0+m5bpe0guSviPpilHOi6R1kloktXR1dRWhbLPye6H9GABXL5qeciVmP1OMIMi3oTNyxp8HlkbE1cBfA/86inkzEyPWR0RzRDTPmTPngos1S9PW9mPMmNjIohkT0i7F7A3FCIJ2YHHW+CJgf3aHiDgRET3J8EagUdLskcxrNpZsbT/OVYumI3lHsVWOYgTBc8AKScslNQF3ARuyO0iar+SdL2lN8ryHRzKv2Vhx5lw/uzt7uMr7B6zCFHzUUET0SbofeAKoBx6JiO2S7kvaHwZ+FfhdSX3AaeCuiAgg77yF1mRWiXYf7KF/IFi1YGrapZi9ScFBAG9s7tmYM+3hrOG/Af5mpPOajUU7Ok4AcJmDwCqMf1lsViY7DpxgQmM9S2ZOTLsUszdxEJiVyc6OblbOn+JfFFvFcRCYlUFEsOPACS5f4PMLWeVxEJiVwcETZzl26hyXe/+AVSAHgVkZ7DiQ7Cie7yCwyuMgMCuD80cMrZzvTUNWeRwEZmWws6ObhdMnMG1CY9qlmL2Fg8CsDHZ0eEexVS4HgVmJnTnXz55DJ72j2CqWg8CsxFo7M6eW8I5iq1QOArMSO7+j2JuGrFI5CMxKbOeBbsY31rF01qS0SzHLy0FgVmI7Ok6wcp5PLWGVy0FgVkIRkRwx5P0DVrkcBGYldKinl6OnzvmHZFbRihIEkm6TtEtSq6QH8rR/SNLW5Pa0pKuz2vZKelHSFkktxajHrFK0dvYAcOncySlXYja4gi9MI6keeBC4hcw1iJ+TtCEiXsrq9grwnog4Kul2YD1wXVb7TRFxqNBazCpNW5eDwCpfMdYI1gCtEbEnInqBx4C12R0i4umIOJqMPkPmIvVmY15rZw+TmuqZP3V82qWYDaoYQbAQ2Jc13p5MG8y9wHeyxgN4UtJmSesGm0nSOkktklq6uroKKtisXNq6erhk7mQkHzFklasYQZDvHR55O0o3kQmCT2RNvjEiVgO3Ax+T9O5880bE+ohojojmOXPmFFqzWVm0dfZwyRxvFrLKVowgaAcWZ40vAvbndpJ0FfD3wNqIOHx+ekTsT+47gcfJbGoyq3onz/ax//gZ7x+wileMIHgOWCFpuaQm4C5gQ3YHSUuAbwAfjoiXs6ZPkjTl/DBwK7CtCDWZpW5P10kALpnjXxRbZSv4qKGI6JN0P/AEUA88EhHbJd2XtD8MfAqYBfxtsq20LyKagXnA48m0BuArEfHdQmsyqwStXd2AjxiyyldwEABExEZgY860h7OGPwp8NM98e4Crc6ebjQVtnSeprxNLZnqNwCqbf1lsViKtnT0snTWRpgZ/zKyy+R1qViJtXT5iyKqDg8CsBPr6B9h7+KT3D1hVcBCYlcBrR05xrj+8RmBVwUFgVgI+2ZxVEweBWQm0+TcEVkUcBGYl0NrZw7yp45gyvjHtUsyG5SAwK4HWrh5vFrKq4SAwK7KIYI9PNmdVxEFgVmSd3WfpPtvnNQKrGg4CsyJrS44Y8hqBVQsHgVmRtfrylFZlHARmRdbW2cPkcQ3MnTIu7VLMRsRBYFZkrb48pVUZB4FZkbV1nvQPyayqOAjMiqj7zDkOnPDlKa26FCUIJN0maZekVkkP5GmXpM8n7VslrR7pvGbV5GeXp3QQWPUoOAgk1QMPArcDq4C7Ja3K6XY7sCK5rQMeGsW8ZlXDJ5uzalSMNYI1QGtE7ImIXuAxYG1On7XAlyLjGWC6pAUjnNesarR19dBQJ5bMnJh2KWYjVowgWAjsyxpvT6aNpM9I5gVA0jpJLZJaurq6Ci7arBRaO3tYNnsSjfXe/WbVoxjv1nzHyMUI+4xk3szEiPUR0RwRzXPmzBlliWbl0drV4yOGrOoUIwjagcVZ44uA/SPsM5J5zarCuf4BXjt8yvsHrOoUIwieA1ZIWi6pCbgL2JDTZwPwm8nRQ+8CjkdExwjnNasKrx4+Sd+AL09p1aeh0AeIiD5J9wNPAPXAIxGxXdJ9SfvDwEbgDqAVOAX89lDzFlqTWRpaOzOHjnqNwKpNwUEAEBEbyXzZZ097OGs4gI+NdF6zatSWnGzuYq8RWJXxoQ1mRdLW2cOCaeOZPK4o/1+ZlY2DwKxIMkcMeW3Aqo+DwKwIIoK2Tl+n2KqTg8CsCA6cOMPJ3n7/hsCqkoPArAjakiOGLvEagVUhB4FZEbR2dgM+dNSqk4PArAjauk4yZXwDcyb78pRWfRwEZkXQmuwo9uUprRo5CMyKwIeOWjVzEJgV6Pjpc3R1n/X+AataDgKzAp0/tYTXCKxaOQjMCtTmy1NalXMQmBWotauHpvo6Fs+YkHYpZhfEQWBWoLbOHpbOmkiDL09pVcrvXLMCvXywh7fNm5J2GWYXzEFgVoBTvX3sO3rKQWBVraAgkDRT0lOSdif3M/L0WSzpB5J2SNou6fez2j4t6XVJW5LbHYXUY1ZurZ09RMDb5nlHsVWvQtcIHgA2RcQKYFMynqsP+MOIuBx4F/AxSauy2v8yIq5Jbr5SmVWVlw9mjhh623yvEVj1KjQI1gKPJsOPAh/M7RARHRHxfDLcDewAFhb4vGYVYffBbprq61g6c2LapZhdsEKDYF5EdEDmCx+YO1RnScuAdwDPZk2+X9JWSY/k27SUNe86SS2SWrq6ugos26w4dh3s5pK5k33EkFW1Yd+9kr4naVue29rRPJGkycDXgY9HxIlk8kPAJcA1QAfw54PNHxHrI6I5IprnzJkzmqc2K5ndB3u8f8Cq3rBX2Y6Imwdrk3RQ0oKI6JC0AOgcpF8jmRD4ckR8I+uxD2b1+QLwrdEUb5am7jPneP3YaX5j3pK0SzErSKHrsxuAe5Lhe4Bv5nZQ5ry8/wDsiIi/yGlbkDV6J7CtwHrMymZ3cmoJHzpq1a7QIPgscIuk3cAtyTiSLpJ0/gigG4EPA+/Lc5jo5yS9KGkrcBPwBwXWY1Y2uw9mrkrmTUNW7YbdNDSUiDgMvD/P9P3AHcnwj4G8V+uIiA8X8vxmadp1oIfxjXUsnuEjhqy6+VAHswu0u7ObFXOnUFfnq5JZdXMQmF2gXQe6vX/AxgQHgdkFOHaql87us94/YGOCg8DsArxxagmvEdgY4CAwuwAvnz9iyOcYsjHAQWB2AXYf7GbyuAYumjY+7VLMCuYgMLsAuw52c+ncyWR+L2lW3RwEZqMUEby0/wSrLpqadilmReEgMBul14+d5sSZPlYtcBDY2OAgMBull/ZnTp7rNQIbKxwEZqP0UscJJLjMRwzZGOEgMBul7ftPcPHsSUxsKuhUXWYVw0FgNkqZHcXT0i7DrGgcBGajcPxU5mI03lFsY4mDwGwUtnccB+DyBd4/YGNHQUEgaaakpyTtTu7zXnxe0t7kAjRbJLWMdn6zSvFieyYIrlo0PeVKzIqn0DWCB4BNEbEC2JSMD+amiLgmIpovcH6z1G1tP86iGROYOakp7VLMiqbQIFgLPJoMPwp8sMzzm5XV1tePcdUi7yi2saXQIJgXER0Ayf3cQfoF8KSkzZLWXcD8SFonqUVSS1
dXV4Flm43ekZO97Dty2puFbMwZ9kBoSd8D5udp+uQonufGiNgvaS7wlKSdEfGjUcxPRKwH1gM0NzfHaOY1K4at7ccAvEZgY86wQRARNw/WJumgpAUR0SFpAdA5yGPsT+47JT0OrAF+BIxofrNKsLX9OBJcudBBYGNLoZuGNgD3JMP3AN/M7SBpkqQp54eBW4FtI53frFJsbT/OxbMnMWV8Y9qlmBVVoUHwWeAWSbuBW5JxJF0kaWPSZx7wY0kvAD8Fvh0R3x1qfrNKExFs2XeUqxd7/4CNPQWdLCUiDgPvzzN9P3BHMrwHuHo085tVmlcPn+JQTy/NS2emXYpZ0fmXxWYj0PLqUQCuXerfPNrY4yAwG4HNrx5h6vgGVsydnHYpZkXnIDAbgZa9R1m9dAZ1db5GsY09DgKzYRw71cvuzh6avVnIxigHgdkwnn/t/P4B7yi2sclBYDaMn7Qdpqm+jncs8aGjNjY5CMyG8XTbYVYvnc74xvq0SzErCQeB2RCOnuzlpY4T3HjJ7LRLMSsZB4HZEJ7Zc5gIuOHSWWmXYlYyDgKzIfxn2yEmNdX71NM2pjkIzIbwdOth1iyfSWO9Pyo2dvndbTaIvYdOsufQSd7ztjlpl2JWUg4Cs0F8f2fm8hjvu2xeypWYlZaDwGwQ39/ZyYq5k1kya2LapZiVlIPALI+es308+8ph3nf5oJfRNhszHARmefzHy12c6w/e781CVgMKCgJJMyU9JWl3cv+Ws3JJWilpS9bthKSPJ22flvR6VtsdhdRjVizffrGDWZOaWO3TSlgNKHSN4AFgU0SsADYl428SEbsi4pqIuAa4FjgFPJ7V5S/Pt0fExtz5zcrtVG8fm3Z0cvuV82nwYaNWAwp9l68FHk2GHwU+OEz/9wNtEfFqgc9rVjKbdnRy+lw/H7jqorRLMSuLQoNgXkR0ACT3w+1Zuwv4as60+yVtlfRIvk1L50laJ6lFUktXV1dhVZsN4d9e2M+8qeN45zKfdtpqw7BBIOl7krblua0dzRNJagJ+Gfha1uSHgEuAa4AO4M8Hmz8i1kdEc0Q0z5njH/hYaRw52csPd3Xxi1deRL2vRmY1omG4DhFx82Btkg5KWhARHZIWAJ1DPNTtwPMRcTDrsd8YlvQF4FsjK9usNL7xfDu9/QP8t3cuTrsUs7IpdNPQBuCeZPge4JtD9L2bnM1CSXicdyewrcB6zC5YRPCVn77G6iXTWTl/StrlmJVNoUHwWeAWSbuBW5JxJF0k6Y0jgCRNTNq/kTP/5yS9KGkrcBPwBwXWY3bBfvrKEfZ0neTuNUvSLsWsrIbdNDSUiDhM5kig3On7gTuyxk8Bbzmhe0R8uJDnNyumR/7zFaZNaOQXr1owfGezMcQHSZsBrZ09PPnSQX7z+qVMbCro/yOzquMgMAPW/6iNcQ11/NYNy9IuxazsHARW8149fJLH/+t1fr15MbMmj0u7HLOycxBYzfvcE7toqKvj/psuTbsUs1Q4CKymPf/aUb69tYN1776YuVPHp12OWSocBFazzvUP8MnHtzF3yjjWvfvitMsxS40Pj7Ca9fAP29jRcYL1H76WSeP8UbDa5TUCq0lb9h3j89/fzS9dfRG3XjE/7XLMUuUgsJpzqOcsv/tPm5k3dTyfWXtF2uWYpc7rw1ZTTp7tY92XWjhyspev/+4NTJ/YlHZJZqnzGoHVjNO9/fzOP25my75j/NVd7+DtC6elXZJZRfAagdWEIyd7uffR59iy7xh/9qtXc9vbvV/A7DwHgY15LXuP8Htf/S8On+zloQ9d6xAwy+EgsDGr+8w5/uKpl3n06b0snjmRr913PVctmp52WWYVx0FgY87hnrM89tw+vvAfezh++hwfum4Jn7jtMqaMb0y7NLOK5CCwMeF0bz8/bj3Ev2zex6YdnfQNBDetnMP/vGUlVy7yTmGzoRQUBJJ+Dfg0cDmwJiJaBul3G/BXQD3w9xFx/kpmM4F/BpYBe4Ffj4ijhdRkY9/AQPD6sdPsOtDNi68f5yd7DvNfrx3lXH8we3ITv3XDMn6tebEvN2k2QoWuEWwDfgX4u8E6SKoHHiRzqcp24DlJGyLiJeABYFNEfFbSA8n4JwqsySpcRHCuPzjXP0Bv30DmPhk+fa6f46fPceJ0HydOn+N4cjtyqpf9x07TcewM+46e4lRvPwASvP2iaXzkxuXccOlsbrhkFo31PirabDQKvVTlDgBJQ3VbA7RGxJ6k72PAWuCl5P69Sb9HgR9SwiD4/KbdbHhh/xvjEfGz4dzOMfho9nz55s1ujpzWyH3ctzzxyJ7nLY8zxPMMVV9uj7c+7uA1Dfe4uX0HBjIB0Ns/kFvAkOoEMyY2sWD6eJbOmsgNl85i5bwpvG3+FN42bwqTfZ4gs4KU4xO0ENiXNd4OXJcMz4uIDoCI6JA0d7AHkbQOWAewZMmFXVx87pRxrJyXs7lAeQfPP+dgXcnNvqHmfUtMvmXerL7DPm7++fLO+6bxYfoO2Tb4vMMts2z1daKpoY7G+jrGNdTRWC+a6utozJo2rqGeaRMamTahkakTGpg2oZHJ4xqG+2fDzAowbBBI+h6Q78DrT0bEN0fwHPk+wUP8H5xfRKwH1gM0NzePen6Au9Ys4a41FxYiZmZj1bBBEBE3F/gc7cDirPFFwPntMwclLUjWBhYAnQU+l5mZjVI59qo9B6yQtFxSE3AXsCFp2wDckwzfA4xkDcPMzIqooCCQdKekduB64NuSnkimXyRpI0BE9AH3A08AO4D/FxHbk4f4LHCLpN1kjir6bCH1mJnZ6Cn3yJRq0NzcHC0teX+yYGZmg5C0OSKac6f7gGszsxrnIDAzq3EOAjOzGucgMDOrcVW5s1hSF/DqBc4+GzhUxHKKpVLrgsqtzXWNTqXWBZVb21ira2lEzMmdWJVBUAhJLfn2mqetUuuCyq3NdY1OpdYFlVtbrdTlTUNmZjXOQWBmVuNqMQjWp13AICq1Lqjc2lzX6FRqXVC5tdVEXTW3j8DMzN6sFtcIzMwsi4PAzKzGjckgkPRrkrZLGpDUnNP2x5JaJe2S9AuDzD9T0lOSdif3M0pQ4z9L2pLc9kraMki/vZJeTPqV5Ux7kj4t6fWs+u4YpN9tyXJsTa45Xeq6/kzSTklbJT0uafog/cqyzIb7+5Xx+aR9q6TVpaol6zkXS/qBpB3JZ+D38/R5r6TjWa/vp0pdV/K8Q74uaSyv5HlXZi2LLZJOSPp4Tp+yLDNJj0jqlLQta9qIvo8K+jxGxJi7AZcDK8lcA7k5a/oq4AVgHLAcaAPq88z/OeCBZPgB4E9LXO+fA58apG0vMLvMy+/TwB8N06c+WX4XA03Jcl1V4rpuBRqS4T8d7HUpxzIbyd8P3AF8h8xV+t4FPFuG124BsDoZngK8nKeu9wLfKud7aiSvSxrLa5DX9QCZH16VfZkB7wZWA9uypg37fVTo53FMrhFExI6I2JWnaS3wWEScjYhXgFZgzSD9Hk2GHwU+WJpKM/8FAb8OfLVUz1Eia4DWiNgTEb3AY2SWW8lExJORub4FwDNkr
naXlpH8/WuBL0XGM8D05Ep8JRMRHRHxfDLcTeYaIAtL+ZxFVPbllcf7gbaIuNAzFxQkIn4EHMmZPJLvo4I+j2MyCIawENiXNd5O/g/JvIjogMwHC5hbwpp+HjgYEbsHaQ/gSUmbJa0rYR257k9Wzx8ZZFV0pMuyVD5C5r/HfMqxzEby96e6jCQtA94BPJun+XpJL0j6jqQrylTScK9L2u8pyFxBcbB/ytJYZjCy76OClt2w1yyuVJK+B8zP0/TJiBjskpfKM61kx8+OsMa7GXpt4MaI2C9pLvCUpJ3Jfw0lqw14CPgMmWXzGTKbrj6S+xB55i14WY5kmUn6JNAHfHmQhynJMsstNc+03L+/rO+3Nz2xNBn4OvDxiDiR0/w8mU0fPcn+n38FVpShrOFel9SWF4Ayl9L9ZeCP8zSntcxGqqBlV7VBEBE3X8Bs7cDirPFFwP48/Q5KWhARHcmqaWcpapTUAPwKcO0Qj7E/ue+U9DiZVcCCv9RGuvwkfQH4Vp6mkS7LotYl6R7gA8D7I9k4mucxSrLMcozk7y/JMhqOpEYyIfDliPhGbnt2METERkl/K2l2RJT05GojeF1SWV5Zbgeej4iDuQ1pLbPESL6PClp2tbZpaANwl6RxkpaTSfSfDtLvnmT4HmCwNYxC3QzsjIj2fI2SJkmacn6YzM7Sbfn6FlPOdtk7B3nO54AVkpYn/0ndRWa5lbKu24BPAL8cEacG6VOuZTaSv38D8JvJ0TDvAo6fX8UvlWSf0z8AOyLiLwbpMz/ph6Q1ZL4HDpe4rpG8LmVfXjkGXTtPY5llGcn3UWGfx1LvBU/jRubLqx04CxwEnshq+ySZveu7gNuzpv89yRFGwCxgE7A7uZ9Zojq/CNyXM+0iYGMyfDGZvf8vANvJbB4px/L7R+BFYGvyZlqQW1syfgeZo1LaylEbmZ37+4AtybVcSvcAAACJSURBVO3hNJdZvr8fuO/8a0pmdf3BpP1Fso5gK2FNP0dmk8DWrOV0R05d9yfL5gUyO91vKENdeV+XtJdXVn0TyXyxT8uaVvZlRiaIOoBzyXfYvYN9HxXz8+hTTJiZ1bha2zRkZmY5HARmZjXOQWBmVuMcBGZmNc5BYGZW4xwEZmY1zkFgZlbj/j8RFDZkH0CdqQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "y = x.tanh()\n", "pl.plot(x.numpy(), y.numpy())" @@ -664,9 +1394,32 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 76, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAD4CAYAAADsKpHdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfXBd9X3n8ffHli38/CgbRzbYgHl0gsEexzQlJaEphGkxSUNrOhO8LbMOLOw203a30Mw2mXbYKc0mTGkLXVIYHprwsBCKJ4UmBPLULQEEGGywjQU2tmxZkp8l25Is6bt/3J/oRVzJsqR775H0ec3c0bnfc373fO+RdL/3nN/vnKOIwMzMbEy5EzAzs2xwQTAzM8AFwczMEhcEMzMDXBDMzCypKHcCAzV79uxYuHBhudMwMxtWXn311b0RUVVo3rAtCAsXLqSmpqbcaZiZDSuS3u9tng8ZmZkZ4IJgZmaJC4KZmQEuCGZmlpywIEhaIOknkjZJekvSH6b4TEnPSdqafs7Ia3ObpFpJWyRdkRdfJmlDmneXJKV4paTHUvwlSQuH/q2amVlf+rOH0AH8cUScB6wEbpZ0PnAr8HxELAaeT89J81YDFwBXAndLGpte6x5gLbA4Pa5M8RuAAxFxFnAncMcQvDczMzsJJywIEVEfEa+l6WZgE1ANrAIeTIs9CFyTplcBj0ZEW0RsA2qBFZLmAVMj4sXIXWL1oR5tul/rCeDy7r0HMzMrjZPqQ0iHci4CXgLmRkQ95IoGMCctVg3szGtWl2LVabpn/ENtIqIDOATMOpnczMxGutbjnfz1v27mjZ0Hi/L6/S4IkiYDTwJfjYjDfS1aIBZ9xPtq0zOHtZJqJNU0NTWdKGUzsxFlb0sbd//0XTbv6esjeOD6VRAkjSNXDL4bEd9P4YZ0GIj0szHF64AFec3nA7tTfH6B+IfaSKoApgH7e+YREfdGxPKIWF5VVfDMazOzEWv/kXYAZkwcX5TX788oIwH3AZsi4tt5s9YBa9L0GuDpvPjqNHJoEbnO45fTYaVmSSvTa17fo033a30JeCF8Kzczsw/ZlwrCrMnFKQj9uZbRp4AvAxskrU+xPwP+Cnhc0g3ADuBagIh4S9LjwNvkRijdHBGdqd1NwAPABODZ9IBcwXlYUi25PYPVg3xfZmYjzv6WXEGYOamyKK9/woIQEf9G4WP8AJf30uZ24PYC8RpgSYF4K6mgmJlZYQeOdheEMh0yMjOzbNh3pJ1xY8XUU4pzoWoXBDOzYWJ/SzszJo6nWKdpuSCYmQ0T+460F+1wEbggmJkNG/uPtLkgmJkZHDh63AXBzMxgX0sbs1wQzMxGt+OdXRxu7SjaOQjggmBmNiwcONJ9DsK4oq3DBcHMbBjYd6S4ZymDC4KZ2bDwH3sI7kMwMxvVin1hO3BBMDMbFvZ7D8HMzOA/9hCmT3CnspnZqLb/SBvTJ46jYmzxPrZdEMzMhoF9Le1FPSkNXBDMzIaFvS1tzJ5cvCGn0L9baN4vqVHSxrzYY5LWp8f27jupSVoo6VjevH/Ia7NM0gZJtZLuSrfRJN1q87EUf0nSwqF/m2Zmw9velnZmTylzQSB3y8sr8wMR8bsRsTQilgJPAt/Pm/1u97yIuDEvfg+wltw9lhfnveYNwIGIOAu4E7hjQO/EzGwE29vcRlW59xAi4ufk7nP8Eelb/u8Aj/T1GpLmAVMj4sWICOAh4Jo0exXwYJp+Arhcxbr7g5nZMNR6vJPmtg5mF/EcBBh8H8KlQENEbM2LLZL0uqSfSbo0xaqBurxl6lKse95OgIjoAA4BswqtTNJaSTWSapqamgaZupnZ8LC3pQ2g/H0IJ3AdH947qAdOi4iLgD8CvidpKlDoG3+kn33N+3Aw4t6IWB4Ry6uqqgaRtpnZ8LG3JXcOQrELwoDv1CypAvgisKw7FhFtQFuaflXSu8DZ5PYI5uc1nw/sTtN1wAKgLr3mNHo5RGVmNhrtbU57CBnoVO7NrwObI+KDQ0GSqiSNTdNnkOs8fi8i6oFmSStT/8D1wNOp2TpgTZr+EvBC6mcwMzPyDxmVuQ9B0iPAi8A5kuok3ZBmreajncmfBt6U9Aa5DuIbI6L72/5NwD8CtcC7wLMpfh8wS1ItucNMtw7i/ZiZjTil6kM44SGjiLiul/h/KhB7ktww1ELL1wBLCsRbgWtPlIeZ2Wi1t6WdKZUVnDJubFHX4zOVzcwybm9LW9H7D8AFwcws83KXrShu/wG4IJiZZd7elvai9x+AC4KZWeaV4sJ24IJgZpZpxzu7OHj0uAuCmdlot6/7LOUp7kMwMxvVmtJZysW+0im4IJiZZVrD4VYA5k49pejrckEwM8uwxrSHMGeq9xDMzEa1hsOtSMW/bAW4IJiZZVpjcxuzJo1n3Njif1y7IJiZZVhTcytVU4rffwAuCGZmmdZwuI25Jeg/ABcEM7NMa2xuZU4JLmwHLghmZpnV2RU0NbeVZMgpuCCYmWXWviNtdAXZ2UOQdL+kRkkb82LfkLRL0vr0uCpv3m2SaiVtkXRFXnyZpA1p3l3pVppIqpT0WIq/JGnh0L5FM7PhqfFw9zkI2dlDeAC4skD8zohYmh7PAEg6n9ytNS9Ibe7uvscycA+wltx9lhfnveYNwIGIOAu4E7hjgO/FzGxEaWzOnaWcmT2EiPg5sP9EyyWrgEcjoi0itpG7f/IKSfOAqRHxYkQE8BBwTV6bB9P0E8Dl3XsPZmajWfcewnDoQ7hF0pvpkNKMFKsGduYtU5di1Wm6Z/xDbSKiAzgEzCq0QklrJdVIqmlqahpE6mZm2deQCkIpzlKGgReEe4AzgaVAPfCtFC/0zT76iPfV5qPBiHsjYnlELK+qqjq5jM3MhpnG5lZmThrP+IrSjP8Z0FoioiEiOiOiC/gOsCLNqgMW5C06H9id4vMLxD/URlIFMI3+H6IyMxuxGg63laz/AAZYEFKfQLcvAN0jkNYBq9PIoUXkOo9fjoh6oFnSytQ/cD3wdF6bNWn6S8ALqZ/BzGxU23P4GPOmla
b/AKDiRAtIegS4DJgtqQ74OnCZpKXkDu1sB74CEBFvSXoceBvoAG6OiM70UjeRG7E0AXg2PQDuAx6WVEtuz2D1ULwxM7Phbs+hVj5ePb1k6zthQYiI6wqE7+tj+duB2wvEa4AlBeKtwLUnysPMbDRp6+hkb0t7SfcQfKaymVkGdQ85PdUFwcxsdKs/lDspzXsIZmajXP2hY4ALgpnZqLcn7SGcOm1CydbpgmBmlkH1h1qZUlnB5MoTjv0ZMi4IZmYZVH/oWEk7lMEFwcwsk/YcanVBMDOz3CGjUnYogwuCmVnmHO/soqmlraQdyuCCYGaWOY3NbUSUdsgpuCCYmWVO/cHcOQjuQzAzG+V2pYIwf7oPGZmZjWp1B3IFoXqGC4KZ2ai26+AxZkwcx8TxpTspDVwQzMwyZ9eBYyXfOwAXBDOzzKk7cJTqEvcfQD8KgqT7JTVK2pgX+6akzZLelPSUpOkpvlDSMUnr0+Mf8tosk7RBUq2ku9KtNEm323wsxV+StHDo36aZ2fAQEew6eIzq6RNLvu7+7CE8AFzZI/YcsCQiPgG8A9yWN+/diFiaHjfmxe8B1pK7z/LivNe8ATgQEWcBdwJ3nPS7MDMbIfYfaaf1eBfzs3jIKCJ+Tu5ex/mxH0VER3r6S2B+X68haR4wNSJejIgAHgKuSbNXAQ+m6SeAy7v3HszMRpvuIafDtQ/hD4Bn854vkvS6pJ9JujTFqoG6vGXqUqx73k6AVGQOAbMKrUjSWkk1kmqampqGIHUzs2zZ1T3kNIt9CH2R9DWgA/huCtUDp0XERcAfAd+TNBUo9I0/ul+mj3kfDkbcGxHLI2J5VVXVYFI3M8ukD05KK8MewoAHuUpaA/wmcHk6DEREtAFtafpVSe8CZ5PbI8g/rDQf2J2m64AFQJ2kCmAaPQ5RmZmNFnUHjjFp/FimTRhX8nUPaA9B0pXAnwJXR8TRvHiVpLFp+gxyncfvRUQ90CxpZeofuB54OjVbB6xJ018CXuguMGZmo82ug7lzEMrRlXrCPQRJjwCXAbMl1QFfJzeqqBJ4LiX9yzSi6NPAX0jqADqBGyOi+9v+TeRGLE0g1+fQ3e9wH/CwpFpyewarh+SdmZkNQ3UHjjF/RumHnEI/CkJEXFcgfF8vyz4JPNnLvBpgSYF4K3DtifIwMxvpIoId+47wyUUzy7J+n6lsZpYR+4+0c6S9k9NmlmcPwQXBzCwjduzPdcm6IJiZjXLdBeH0WS4IZmaj2o59uYJQrk5lFwQzs4zYsf8oc6ZUMmH82LKs3wXBzCwjduw/WrbDReCCYGaWGTv2H2VBmTqUwQXBzCwTWo93sudwa9lGGIELgplZJuw6eIyI8o0wAhcEM7NM6B5h5D0EM7NR7r29RwBYNHty2XJwQTAzy4Bte1uYNmEcMyaW/rLX3VwQzMwyYNveIyyaPaksl73u5oJgZpYB25qOcMbsSWXNwQXBzKzMjrV3svtQK4tcEMzMRrft+1KHclXGC4Kk+yU1StqYF5sp6TlJW9PPGXnzbpNUK2mLpCvy4sskbUjz7kq30kRSpaTHUvwlSQuH9i2amWXb9g9GGGW8IJC77eWVPWK3As9HxGLg+fQcSeeTuwXmBanN3d33WAbuAdaSu8/y4rzXvAE4EBFnAXcCdwz0zZiZDUfdQ04Xzsp4QYiIn5O713G+VcCDafpB4Jq8+KMR0RYR24BaYIWkecDUiHgxIgJ4qEeb7td6Arhc5exmNzMrsW17jzB3aiWTKk94V+OiGmgfwtyIqAdIP+ekeDWwM2+5uhSrTtM94x9qExEdwCFgVqGVSlorqUZSTVNT0wBTNzPLlveaWsp+uAiGvlO50Df76CPeV5uPBiPujYjlEbG8qqpqgCmamWVHRFDb2MJZc8p3hnK3gRaEhnQYiPSzMcXrgAV5y80Hdqf4/ALxD7WRVAFM46OHqMzMRqSmljYOt3aweM6Ucqcy4IKwDliTptcAT+fFV6eRQ4vIdR6/nA4rNUtamfoHru/Rpvu1vgS8kPoZzMxGvNqGFgAWZ2AP4YQ9GJIeAS4DZkuqA74O/BXwuKQbgB3AtQAR8Zakx4G3gQ7g5ojoTC91E7kRSxOAZ9MD4D7gYUm15PYMVg/JOzMzGwa2NuYKwllzh0FBiIjrepl1eS/L3w7cXiBeAywpEG8lFRQzs9Fma2MzU0+poGpyZblT8ZnKZmbltLWhhcVzp5T1onbdXBDMzMqotrElE/0H4IJgZlY2+1ra2HekPRNDTsEFwcysbGpTh/LiueUfcgouCGZmZfNOQzOQjSGn4IJgZlY2b9c3M23COOZNO6XcqQAuCGZmZbN5z2HOm5eNEUbggmBmVhZdXcGWPc2ce+rUcqfyARcEM7My2LH/KEfbOzlvXjY6lMEFwcysLDbvOQzAefO8h2BmNqptqm9mjMjEVU67uSCYmZXBpvrDLJw9iQnjx5544RJxQTAzK4NNew5zXoY6lMEFwcys5A4ebWfn/mMsqZ5W7lQ+xAXBzKzENu7KdSh/fKQUBEnnSFqf9zgs6auSviFpV178qrw2t0mqlbRF0hV58WWSNqR5dykrZ2mYmRXBhl2HAFhSPUIOGUXElohYGhFLgWXAUeCpNPvO7nkR8QyApPPJ3Q3tAuBK4G5J3b0p9wBryd1yc3Gab2Y2Im3cdYgFMycwfeL4cqfyIUN1yOhy4N2IeL+PZVYBj0ZEW0RsA2qBFZLmAVMj4sV0L+WHgGuGKC8zs8x5c9fBzB0ugqErCKuBR/Ke3yLpTUn3S5qRYtXAzrxl6lKsOk33jJuZjThZ7VCGISgIksYDVwP/N4XuAc4ElgL1wLe6Fy3QPPqIF1rXWkk1kmqampoGlbeZWTl0dyh/onp6mTP5qKHYQ/g88FpENABERENEdEZEF/AdYEVarg5YkNduPrA7xecXiH9ERNwbEcsjYnlVVdUQpG5mVlpv1B0EstehDENTEK4j73BR6hPo9gVgY5peB6yWVClpEbnO45cjoh5olrQyjS66Hnh6CPIyM8uc13cc4IyqSZnrUAaoGExjSROBzwFfyQv/taSl5A77bO+eFxFvSXoceBvoAG6OiM7U5ibgAWAC8Gx6mJmNKBHBazsO8tlz55Q7lYIGVRAi4igwq0fsy30sfztwe4F4DbBkMLmYmWXd+/uOsv9IOxefNuPEC5eBz1Q2MyuR13ceAOCi07LXoQwuCGZmJfPa+weZXFnB2XOzc8nrfC4IZmYl8tqOA1y4YBpjx2Tz6jwuCGZmJdDS1sGm+sOZ7T8AFwQzs5Ko2b6froBPLpp14oXLxAXBzKwEXt62n4ox4uLTs9mhDC4IZmYl8dK2/Xx8/jQmjh/UaP+ickEwMyuyY+2dvFl3kBWLZpY7lT65IJiZFdnrOw5wvDNYmeH+A3BBMDMrul++t48xgmULszvCCFwQzMyK7he1e7lwwXSmnjKu3Kn0yQXBzKyIDh09zhs7D3LpWbPLncoJuSCYmRXRi+/tpSvgVxdn/x4uLghmZkX0i617mTR+bGYvaJfPBcHMrIj+rXYvK8+Yxbix2f+4z
X6GZmbD1HtNLby/7yifPjv7h4tgkAVB0nZJGyStl1STYjMlPSdpa/o5I2/52yTVStoi6Yq8+LL0OrWS7kq30jQzG9ae39QIkNk7pPU0FHsIn4mIpRGxPD2/FXg+IhYDz6fnSDofWA1cAFwJ3C1pbGpzD7CW3H2WF6f5ZmbD2vObGzhn7hQWzJxY7lT6pRiHjFYBD6bpB4Fr8uKPRkRbRGwDaoEVkuYBUyPixYgI4KG8NmZmw9KhY8d5ZfsBLj9veOwdwOALQgA/kvSqpLUpNjci6gHSz+6tUQ3szGtbl2LVabpn/CMkrZVUI6mmqalpkKmbmRXPz95porMrhlVBGOxl9z4VEbslzQGek7S5j2UL9QtEH/GPBiPuBe4FWL58ecFlzMyy4Edv7WHWpPEsXZDty1XkG9QeQkTsTj8bgaeAFUBDOgxE+tmYFq8DFuQ1nw/sTvH5BeJmZsNS6/FOXtjcyBVLTs3s7TILGXBBkDRJ0pTuaeA3gI3AOmBNWmwN8HSaXgesllQpaRG5zuOX02GlZkkr0+ii6/PamJkNOz/d0sTR9k6uWjKv3KmclMEcMpoLPJVGiFYA34uIf5X0CvC4pBuAHcC1ABHxlqTHgbeBDuDmiOhMr3UT8AAwAXg2PczMhqVnN9YzY+I4Vp6R7fsf9DTgghAR7wEXFojvAy7vpc3twO0F4jXAkoHmYmaWFa3HO3l+UyO/+Yl5VAyDs5PzDa9szcwy7sebGmhp6+C3LvxYuVM5aS4IZmZD6Puv7WLetFNYeUa2745WiAuCmdkQ2dvSxs/eaWLV0uphNbqomwuCmdkQWbd+N51dwRcvLnhubea5IJiZDYGI4LFXdvLx6mmcPXdKudMZEBcEM7Mh8Or7B9jS0MzvffK0cqcyYC4IZmZD4Hsv7WByZQVXD8PRRd1cEMzMBunAkXb+ZUM9q5Z+jEmVg71EXPm4IJiZDdL3Xt5BW0cX11+ysNypDIoLgpnZILR1dPLAv2/n0sWzOefU4dmZ3M0FwcxsEH7wRj1NzW3850vPKHcqg+aCYGY2QF1dwT0/e5dz5k7h0sWzy53OoLkgmJkN0LMb91Db2MItnz2LdOXnYc0FwcxsALq6gr99YStnVk3iqo8Pr/se9MYFwcxsAP5lQz2b9zTzXz+7eFhet6gQFwQzs5PU3tHFN3+4hXNPnTIsL3Pdm8HcQnOBpJ9I2iTpLUl/mOLfkLRL0vr0uCqvzW2SaiVtkXRFXnyZpA1p3l0aCQfjzGzE+t5L77Nj/1Fu/fy5I2bvAAZ3C80O4I8j4rV0b+VXJT2X5t0ZEf87f2FJ5wOrgQuAjwE/lnR2uo3mPcBa4JfAM8CV+DaaZpZB+1rauPPHW/nVs2bza2dXlTudITXgPYSIqI+I19J0M7AJ6Ouar6uARyOiLSK2AbXACknzgKkR8WJEBPAQcM1A8zIzK6Y7/nUzR9o6+MbV54+IkUX5hqQPQdJC4CLgpRS6RdKbku6XNCPFqoGdec3qUqw6TfeMF1rPWkk1kmqampqGInUzs357edt+Hq+p44ZLF3HWnOF9VnIhgy4IkiYDTwJfjYjD5A7/nAksBeqBb3UvWqB59BH/aDDi3ohYHhHLq6pG1q6amWXbsfZO/scTb7Bg5gT+22cXlzudohhUQZA0jlwx+G5EfB8gIhoiojMiuoDvACvS4nXAgrzm84HdKT6/QNzMLDO++cMtbN93lDt++xPD+oqmfRnMKCMB9wGbIuLbefH8MzS+AGxM0+uA1ZIqJS0CFgMvR0Q90CxpZXrN64GnB5qXmdlQ+8mWRu7/f9u4/pLT+ZUzh/8lKnozmDL3KeDLwAZJ61Psz4DrJC0ld9hnO/AVgIh4S9LjwNvkRijdnEYYAdwEPABMIDe6yCOMzCwT9hxq5U8ef4NzT53Cn111XrnTKaoBF4SI+DcKH/9/po82twO3F4jXAEsGmouZWTG0Hu/kK//0Kq3HO/m737uIU8aNLXdKRTUyD4SZmQ1SRHDb9zfwxs6D/J8vLxuRo4p68qUrzMwK+OYPt/DU67v448+dzRUXnFrudErCBcHMrIe//0ktd//0Xa5bcRq3fPascqdTMj5kZGaWRATffu4d/vaFWq6+8GP85aoLRtzZyH1xQTAzI1cM/tczm/jOL7bxu8sX8L+++PERdeG6/nBBMLNR72h7B7c+uYF1b+xmzSWn8/XfuoAxo6wYgAuCmY1y2/ce4cZ/epUtDc389yvO4b9cduaoOkyUzwXBzEaliOAHb9bztac2MGaMeOD3V4y4y1mfLBcEMxt16g8d43/+80Z+vKmRC+dP4+9+72IWzJxY7rTKzgXBzEaN1uOdPPzi+/zN81vp6Oria1edx+9/aiEVYz0CH1wQzGwUON7ZxROv1vE3P97KnsOt/NrZVfzFqgs4fdakcqeWKS4IZjZiHTjSziOv7ODhF9+n/lArF502nTt/dymXnDmr3KllkguCmY0oHZ1d/Pu7+/jn9bt4ZkM9rce7+NRZs7j9C0v4zDlzRu0Iov5wQTCzYe9oewf/XruPn2xp5IdvNbC3pY2pp1TwxYvns+aShZxz6si/MN1QcEEws2Hn4NF2Xt9xkFffP/DBo72zi0njx/Jr51Rx9YXVfObcKiorRvblqoeaC4KZZVZHZxfb9x1la0Mz7zS08E5jM5vqD/Ne0xEAxo4R58+byvWXnM5nzp3D8oUzXAQGITMFQdKVwN8AY4F/jIi/KnNKZlYkEcGR9k72tbSxt6WNvS3t7G1po+FwG7sOHKPuwFHqDhxjz+FWOrsCAAkWzJjI2XMn89sXz2fZ6TP4xPxpTByfmY+xYS8TW1LSWODvgc8BdcArktZFxNvlzcxs5IoIOruCjq6gK01/8Mh73tUFHV1ddEXQ1tGVexzvoq2jM+953nRHJ63Hu2hp7aCl7TgtbR00t+YeLW0dtLR2cPBYO63Huz6SkwSnTj2F+TMmsGLRTKqnT2DR7EmcPXcKZ86Z5A//IsvK1l0B1EbEewCSHgVWkbv/8pB6/JWd3PuL9wrOi4he2/U6p/cmfc0a2LqA3ppFH636WFWf8/rSW/4DyT3XboD5D6BNX1kOZF25dkO8PQb499HbzIAPPvS7IlcEBvq776/JlRW5xykVTEmPj00/hSmV45g6oYLZkyuZNbmS2ZPHM3tyJbMnVzJz0njGV/gksXLJSkGoBnbmPa8DPtlzIUlrgbUAp5122oBWNGPSeM6Z28eIgz5GpPU2q69hbH0NcOtr9Fvf7QrP7XMwXZ/r6iP/AW2PIqxrAG9u4Nu3r3Ynn/9AfpcnMpAcxyh3zP1DD4kxY0RFj/gY5WJj0jIVY3Ox8RVjqKwYQ2XFWCrH5U1XjEnPc9Pjx44ZlVcLHe6yUhAK/eV85PtLRNwL3AuwfPnyAX2/+dz5c/nc+XMH0tTMbETLyr5ZHbAg7/l8YHeZcjEzG5WyUhBeARZLWiRpPLAaWFfmnMzMRpVMHDKKiA5JtwA/JDfs9P6IeKvMaZmZjSqZKAgAEfEM8Ey58zAzG62ycsjIzMzKzAXBzMwAFwQzM0tcEMzM
DAD1dYp8lklqAt4fYPPZwN4hTGcoZTU353VynNfJy2puIy2v0yOiqtCMYVsQBkNSTUQsL3cehWQ1N+d1cpzXyctqbqMpLx8yMjMzwAXBzMyS0VoQ7i13An3Iam7O6+Q4r5OX1dxGTV6jsg/BzMw+arTuIZiZWQ8uCGZmBozggiDpWklvSeqStLzHvNsk1UraIumKXtrPlPScpK3p54wi5PiYpPXpsV3S+l6W2y5pQ1quZqjz6GWd35C0Ky+/q3pZ7sq0HWsl3VqCvL4pabOkNyU9JWl6L8uVZJud6P0r5640/01JFxcrl7x1LpD0E0mb0v/AHxZY5jJJh/J+v39e7Lzy1t3n76ZM2+ycvG2xXtJhSV/tsUxJtpmk+yU1StqYF+vX59Gg/x8jYkQ+gPOAc4CfAsvz4ucDbwCVwCLgXWBsgfZ/Ddyapm8F7ihyvt8C/ryXeduB2SXeft8A/uQEy4xN2+8MYHzarucXOa/fACrS9B29/V5Ksc368/6Bq4Bnyd0VcCXwUgl+d/OAi9P0FOCdAnldBvyglH9T/f3dlGObFfi97iF3AlfJtxnwaeBiYGNe7ISfR0Px/zhi9xAiYlNEbCkwaxXwaES0RcQ2oBZY0ctyD6bpB4FripNp7hsR8DvAI8VaR5GsAGoj4r2IaAceJbfdiiYifhQRHenpL8ndXa9c+vP+VwEPRc4vgemS5hUzqYioj4jX0nQzsIncfcuHi5Jvsx4uB96NiIFeCWFQIuLnwP4e4f58Hg36/3HEFoQ+VAM7857XUfifZW5E1EPuHwyYU8ScLgUaImJrL/MD+JGkVyWtLWIePd2Sdtnv72UXtb/bslj+gNw3yUJKsc368/7Luo0kLQQuAl4qMPsSSW9IelbSBaXKiRP/bsr9d7Wa3r+clWub9efzaNDbLTM3yBkIST8GTi0w62sR8XRvzQrEijb2tjupAxwAAAKESURBVJ85XkffewefiojdkuYAz0nanL5FFC034B7gL8ltm78kd0jrD3q+RIG2g96W/dlmkr4GdADf7eVlirLNeqZaINbz/Zf07+1DK5YmA08CX42Iwz1mv0bukEhL6h/6Z2BxKfLixL+bcm6z8cDVwG0FZpdzm/XHoLfbsC4IEfHrA2hWByzIez4f2F1guQZJ8yKiPu2uNhYjR0kVwBeBZX28xu70s1HSU+R2DQf94dbf7SfpO8APCszq77Yc0rwkrQF+E7g80sHTAq9RlG3WQ3/ef1G20YlIGkeuGHw3Ir7fc35+gYiIZyTdLWl2RBT9Im79+N2UZZslnwdei4iGnjPKuc3o3+fRoLfbaDxktA5YLalS0iJyFf7lXpZbk6bXAL3tcQzWrwObI6Ku0ExJkyRN6Z4m16m6sdCyQ6nHMdsv9LLOV4DFkhalb1aryW23YuZ1JfCnwNURcbSXZUq1zfrz/tcB16eRMyuBQ927/sWS+qTuAzZFxLd7WebUtBySVpD7LNhXzLzSuvrzuyn5NsvT6956ubZZ0p/Po8H/Pxa7x7xcD3IfYnVAG9AA/DBv3tfI9cZvAT6fF/9H0ogkYBbwPLA1/ZxZpDwfAG7sEfsY8EyaPoPcaIE3gLfIHTYpxfZ7GNgAvJn+qOb1zC09v4rcKJZ3S5EbuUEAO4H16fEP5dxmhd4/cGP375Tcbvzfp/kbyBvxVsScfpXcoYI387bTVT3yuiVtmzfIdc7/Son+rgr+bsq9zdJ6J5L7gJ+WFyv5NiNXkOqB4+kz7IbePo+G+v/Rl64wMzNgdB4yMjOzAlwQzMwMcEEwM7PEBcHMzAAXBDMzS1wQzMwMcEEwM7Pk/wNYtumuXL72iQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "y = x.exp()\n", "pl.plot(x.numpy(), y.numpy())" @@ -674,9 +1427,32 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 77, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 77, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD4CAYAAADxeG0DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAb8ElEQVR4nO3deXBc1YHv8e+RZMnWvi/WYkl4lXcjGzuEJZjFDCZAJpMAIaFIMoTUI2SfrCSVWVKZN6mQvGIyGbMMhLAECITAEMAJJDbg3Rivkhfttlr72nJr6T7vjxbGAQO21erbt/v3qUpZaim3f7cs/Tg+95x7jbUWERFxrzinA4iIyMSoyEVEXE5FLiLicipyERGXU5GLiLhcghNvmpuba8vLy514axER19qxY0entTbvna87UuTl5eVs377dibcWEXEtY0zjqV7X1IqIiMupyEVEXE5FLiLicipyERGXU5GLiLicilxExOVU5CIiLufIOnIRkVhgraXbO0JDl5f6ziEau7x8orqU0uzkkL6PilxEZAKstfQMjVLf6aWxy0tDp5f6riEaOr00dHkZ8I2d+N74OMOysiwVuYiIE/p9o9R1eKnvHKS+M1jUjV1e6ju99J9U1nEGirOmUZ6TwnVlxZTnpFCem0x5TgolWckkJoR+RltFLiIybswfoKXnOHWdg9R1eDnSMciRDi91HV46B4dPfJ8xUJw5jYrcFK5ZUsyMnGQqclMoz02hdJLK+v2oyEUk5vQOjYwX9CB1ncE/j3QER9ij/rcff5mVPIXKvFQumZtHZV4qlbkpVOalUJqdTFJCvINn8LdU5CISlUb9AZq7h06MrOs6vCdG2l3ekRPfNyXeUJadTGVeKpfOK6AyL4Vz8lKozE0lKyXRwTM4fSpyEXG14TE/9Z1eDrUNcqh9kMPtAxxqG6S+08tY4O3RdW5qIpW5qVxW9VZZp1KZl0pp1jQS4t29EltFLiKu4Bv1U9fh5dB4UR9qH+BQ+yCNXUP4xws7zsCMnBRm5qdyaVXBeFmncE5uKhnJUxw+g8mjIheRiHJ8xM+RjmBRH2wb5FBbcJTd1D3EWwPs+DjDjJxkZuencdXCImbmpzIrP43KvBSmTomcuetwUZGLiCPG/AEauoao9QxQ4+mnxjNArWeA5p4h7HhhJ8QZKnJTqJqezkeXFDO7IFjY5bmRdbHRaSpyEZl0HQPDf1PYNZ5+DrUNMjwWAIJTIhW5KSwszuDvl5UwqyCVWfmplOemMMXl89fhMOEiN8aUAr8GCoEAsM5a+4uJHldE3Mc36udQ2yAHPP0nirvWM0Dn4NurRHJTk5hXlManV85gblE6cwvTmJmfGpNTIqESihH5GPB1a+1OY0wasMMYs95auz8ExxaRCGStpa1/mH3H+th/rJ8D4yPthk7viXnsqVPimF2Qxkfm5J8o7DmFaeSmJjkbPgpNuMitta1A6/jHA8aYA0AxoCIXiQKBgKWhy8u+Y/3j/wuW98lrsWfkJDO3MI21i6Yzb7ywZ+SkEB9nHEweO0I6R26MKQeWAltO8bVbgVsBysrKQvm2IhIiw2PBqZH944W971g/B1r78Y74geDmmVn5aVwyN5/509Opmp7BvKI00qZG79I+NwhZkRtjUoHfAV+x1va/8+vW2nXAOoDq6mr7zq+LSHj5Rv3UeAbY3dLLnpY+9h7r53D7wIkt6imJ8cwrSufj55Ywf3oGVdPTmVWQqtUiESgkRW6MmUKwxB+21j4VimOKSOiM+gMcahtkd0svu4/2sbull1rP26Wdk5LI/OIMLp6TR1VROvOnp1Oek0KcpkZcIRSrVgxwH3DAWvuziUcSkYkIBCx1nd5gabcES3vfsf4TS/3SpiawqCSDz19QyaLiDBaVZjI9YyrBX2Vxo1CMyM8HPg3sMcbsGn/tu9ba50NwbBH5AK19x9nZ2MubLb3sbull79F+BoeD98eeNiWeBcXp3LRyBotKMlhUksmM7GSNtKNMKFatvArop0IkDHyjfvYd6+eNph52NvWws7EXT78PgMT4OOYVpXHd0mIWlmSwuCSTmfmpWjkSA7SzUyRCWWs51ucLlnZjLzubeth/rJ8Rf3CKpDhzGssrsllWlsmysizmFaWH/YEGEhlU5CIRIjja7jtR2jubemjrDz6VJikhjsUlmdxyfjlLy7JYVpZJfvpUhxNLpFCRizik7/goOxq72VLfzbb6bvYc7TuxiqQ0exorK3NYVpbF0rJM5hWl654j8p5U5CJh0tbvY2t9N9sautla301t2wDWBjfZLCzO4LMfruDcsiyWlmWRl6Zt7HL6VOQik8BaS0PXENvqu9naECzvxq4hAJIT4zl3RhZ/t7CI5eXZLC3L1A2jZEJU5CIh0tw9xKYjXbx+pJPXj3TRPhCc385OSaR6RhafXjmDFRXZVBWlu/7RYhJZVOQiZ8nT52NTXed4eXfR0nMcCN6m9UPn5HBeZTbnVWRzTl6qNtvIpFKRi5ymrsFhNtd18/qRTjbVdVHX4QUgY9oUVlXmcOuFlayqzGFmvopbwktFLvIefKN+tjV0s/FQJxsOdlDjGQCCN5M6rzKHG1eUsbIyh6qidO2UFEepyEXGWWs50jHIXw8Gi3tLfRe+0QCJ8XFUl2fxzSvmsOqcHBYWZ2gpoEQUFbnEtL6hUV47EizuDQc7ONYX3O5emZvC9cvLuHB2Lisrc0hO1K+KRC79dEpMCQQsu4/28ZfadjYc7GBXcy8BC2lJCZw/M5fbL8njglm5lGYnOx1V5LSpyCXqeYfH2Hiok5dr2ni5poPOwWGMgUUlmdz+kZlcODuPJaWZWhIorqUil6jU3D3EyzXt/Lmmnc1HuhjxB0ibmsDFc/JZPTefC2fnkZ2S6HRMkZBQkUtUsNayu6WPF/d5+POBdmrbgitMKnNTuPlDM7hkbgHV5Vm6SClRSUUurjXmD7CtoYcX93l4aZ+HY30+4uMMK8qz+f5V87hkbj6VealOxxSZdCpycRXfqJ/XDnfy4j4P6/e30TM0SlJCHBfOzuPrl89
h9bx8MpM1ZSKxRUUuEc836ucvte08t7uVV2ra8Y74SUtKYPW8fK6YX8hFc/K0PFBimn76JSKNjAXYeKiD53a38tI+D94RP7mpiXx0STFrFhSyqjJHT8MRGacil4gx5g+wqa6LZ988xgt7PfT7xsiYNoWrF0/n6sXTOa8iW0sERU5BRS6OstayvbGHZ3Yd5Y97PHR5R0hNSuDyqgKuXjyd82fmauQt8gFU5OKI5u4hfrezhad2HqWpe4ipU+K4dF4BaxdN5+I5eXrQgsgZUJFL2Az4RvnjHg9P7mxha303xsCqyhy+vHoWaxYUkpKkH0eRs6HfHJlU/oDltcOd/G5nCy/u8+AbDVCZm8I3r5jDtUuLKc6c5nREEddTkcukaO07zmNbm3l8ezOtfT4ypk3h4+eW8LFlJSwtzdSDF0RCSEUuIeMPWP56sJ1HtjTxck07FrhgVh53rq1i9bx8khI07y0yGVTkMmGePh+/3dbMb7c1cazPR15aEl+8+ByuX16m28GKhIGKXM5KIGD566GOE6Nvf8Bywaxc7lxbxaVVBbo5lUgYqcjljAz4RnlyRwsPvt5AQ9cQuamJ3HphJTcsL6MsR6NvESeoyOW0NHZ5eeD1Bp7Y3sLg8BjLyjL52uVzWDO/UBt2RBymIpf3ZK1lS30392yo4+XaduKNYe2iIm45v4LFpZlOxxORcSpyeRd/wPLSPg+/2lDHm8295KQk8qWPzORTK2dQkD7V6Xgi8g4qcjnBN+rnqZ1HuWdjHfWdXmbkJPOv1y7g4+eWaMu8SARTkQuDw2M8tKmR+16tp3NwmEUlGfzyU8u4Yn4h8XHauCMS6UJS5MaY+4G1QLu1dkEojimTb8A3yq83NXLPxjp6h0a5YFYuX7x4Casqc7TzUsRFQjUifwC4G/h1iI4nk6jfN8oDrzVw36v19B0f5ZK5+dyxehZLdAFTxJVCUuTW2g3GmPJQHEsmz+DwGPdtrOfeV+sY8I1x6bxggS8qUYGLuFnY5siNMbcCtwKUlZWF620FGB7z88iWJu5++TBd3hEuqyrgy6tnsaA4w+loIhICYStya+06YB1AdXW1Ddf7xjJ/wPLMrqP8bP1BWnqOs6oyh39aM4elZVlORxORENKqlShkreWV2nb+7wu11HgGmD89nR9ft5ALZuXqIqZIFFKRR5mDbQP8y3P72Xiok/KcZO6+cSl/t6CIOC0jFIlaoVp++ChwMZBrjGkBfmitvS8Ux5bT0zs0wl3rD/KbLU2kJMbzw6uruGnlDN2FUCQGhGrVyg2hOI6cuTF/gEe2NvGz9QfpPz7Kp86bwVcvm012SqLT0UQkTDS14mJb67u58/d7qW0bYFVlDj/8aBVzC9OdjiUiYaYid6G+oVF+8sIBHt3aTHHmNH5107lcMb9AFzJFYpSK3EWstTy3u5UfPbufnqER/vGCCr562WySE/XXKBLL1AAu0dw9xJ3P7OUvtR0sLM7ggVuWa0OPiAAq8ogXCFh+vamBf3+hFmPgB2uruPlD5boroYicoCKPYK19x/nmE7t59XAnF8/J49+uW0hx5jSnY4lIhFGRR6hndh3lzt/vZdRv+fF1C7lhRakuZorIKanII0zv0Ah3PrOPZ988xtKyTO76xBLKc1OcjiUiEUxFHkFeO9zJ1x9/k87BYb5x+Wxuu+gcErQzU0Q+gIo8AgQClrtfOcxdfzpIZW4K93zmfBaWaEWKiJweFbnDerwjfPXxXfyltoNrl0znxx9bqHXhInJG1BgO2t3Syxd/s5OOgWH+5doF3HRemS5oisgZU5E7wFrLw1ua+Odn95OXlsQTt61isZ6XKSJnSUUeZr5RP999eg9P7TzKRbPz+Pknl5ClOxWKyASoyMOoY2CYLzy0nZ1NvXzl0lncccksPfBBRCZMRR4mB1r7+fyD2+nyDvNfn1rGlQuLnI4kIlFCRR4GGw91cNtDO0idmsATX/iQlhaKSEipyCfZ7984yjeeeJOZ+ak8cMsKCjOmOh1JRKKMinwSrdtwhB8/X8PKymzWfaaa9KlTnI4kIlFIRT4JrLX8x4u1/PIvR7hqURE/+8RikhLinY4lIlFKRR5i1lp+9Ox+Hni9gRvPK+Nfr1mglSkiMqlU5CHkD1i+9/QeHtvWzOc+XMH3r5qnnZoiMulU5CESCFi+89RuHt/ewpcumcnXLputEheRsFCRh4C1lh/8YS+Pb2/hjktm8rXL5zgdSURiiG52PUHWWv75uf38ZnMTt110Dl+9bLbTkUQkxqjIJ+iu9Qf5n9ca+Oz5FXxrzRxNp4hI2KnIJ+ChzY38v5cP84nqEu5cqwubIuIMFflZemFvKz94Zi+r5+bz4+sWqsRFxDEq8rOwraGbOx7bxdLSTO6+cZmeqykijlIDnaHm7iG+8NAOSjKncd/Ny5mWqB2bIuIsFfkZGBwe4/MPbmfMH+Dem6v1QAgRiQhaR36a/AHLlx99g8Mdgzx4ywoq81KdjiQiAmhEftruWn+QP9e088Orq/jwrFyn44iInKAiPw2v1LZz9yuH+WR1KZ9ZVe50HBGRvxGSIjfGrDHG1BpjDhtjvh2KY0aKY73H+dpvdzG3MI0fXTPf6TgiIu8y4SI3xsQD/wlcCVQBNxhjqiZ63Egw6g9w+yM7GRkL8MtPLWPqFK1QEZHIE4oR+QrgsLW2zlo7AjwGXBOC4zrupy/VsrOpl5/8/SJd3BSRiBWKIi8Gmk/6vGX8tb9hjLnVGLPdGLO9o6MjBG87ubbWd7NuQx03rCjj6sXTnY4jIvKeQlHkp9qbbt/1grXrrLXV1trqvLy8ELzt5BkcHuPrT+yiNCuZ7181z+k4IiLvKxTryFuA0pM+LwGOheC4jvm3/z1AS89xHv/CKlKStNReRCJbKEbk24BZxpgKY0wicD3whxAc1xGv1LTz6NYmbr2gkuXl2U7HERH5QBMeblprx4wxtwMvAvHA/dbafRNO5gDv8BjffXoPswtS9YAIEXGNkMwbWGufB54PxbGc9PM/HaS1z8fdN67SUkMRcQ3t7Bx3oLWf+19r4PrlpZw7Q1MqIuIeKnIgELB8//d7yZg2hW+tmet0HBGRM6IiB57c0cKOxh6+feVc3ZpWRFwn5ot8wDfKv79Qw/LyLD6+rMTpOCIiZyzmi/yeDXV0eUe4c20VcXF67qaIuE9MF3n7gI97NtazdlERi0oynY4jInJWYrrIf/GnQ4z6A3zj8jlORxEROWsxW+R1HYM8tq2ZG88rozw3xek4IiJnLWaL/Kcv1TI1IY47Vs9yOoqIyITEZJHXegZ4fo+Hz324gtzUJKfjiIhMSEwW+X//9QjJifHccn6F01FERCYs5oq8pWeIZ948xg0ryrT5R0SiQswV+b0b6zHA5z6s0biIRIeYKvKuwWEe29bEtUuLmZ45zek4IiIhEVNF/uCmRnyjAW67qNLpKCIiIRMzRX58xM+DrzdweVUBM/PTnI4jIhIyMVPkz+0+Rt/xUT6ruXERiTIxU+SPbWumMi+F8yr00AgRiS4xUeQH2wbY0djDDcvLMEZ3OBSR6BITRf
7o1iamxBs+tqzY6SgiIiEX9UXuG/Xz9BtHuWJ+ITnaji8iUSjqi/zFfR56h0a5YUWZ01FERCZF1Bf5I1uamJGTzKrKHKejiIhMiqgu8sYuL1vqu/nk8lI9xk1EolZUF/nzezwAXLNEFzlFJHpFdZG/sM/D4pIMinVfFRGJYlFb5Md6j/Nmcy9rFhQ5HUVEZFJFbZG/sDc4rbJmQaHDSUREJldUF/ncwjQq9GBlEYlyUVnk7QM+tjV2azQuIjEhKov8pX1tWAtXan5cRGJAVBb5C3s9VOamMLsg1ekoIiKTLuqKvHdohE11XVyxoFB3OhSRmBB1Rb7hUCf+gOXyqgKno4iIhMWEitwY8w/GmH3GmIAxpjpUoSZic10XaUkJLCzOcDqKiEhYTHREvhf4GLAhBFlCYvORLlZUZJMQH3X/2BAROaUJtZ219oC1tjZUYSaqrd9HXaeXlbrToYjEkLANW40xtxpjthtjtnd0dEzKe2yu6wJQkYtITEn4oG8wxvwJONXOmu9Za5853Tey1q4D1gFUV1fb0054BjbXdZE2NYGq6emTcXgRkYj0gUVurb00HEFCYXNdN+dVZBOve4+LSAyJmiuCnj4f9ZofF5EYNNHlh9cZY1qAVcD/GmNeDE2sM7elXvPjIhKbPnBq5f1Ya58Gng5RlgnZdKSL9KkJzCvS/LiIxJaomVrZXNfFiooczY+LSMyJiiJv7TtOQ9cQKyuznY4iIhJ2UVHkW+u7Ac2Pi0hsiooi39/aT2J8HHMK05yOIiISdlFR5DWtA5yTn8oU3V9FRGJQVDRfrWeAeRqNi0iMcn2R93hH8PT7NK0iIjHL9UVe4xkAYK7Wj4tIjHJ9kdd6+gE0tSIiMcv1RV7jGSAreQp5aUlORxERcURUFPncwnQ9aFlEYparizwQsBxsG9CFThGJaa4u8uaeIYZG/MwrUpGLSOxydZEfaB1fsVKoFSsiErtcXeS1ngGMgdkFGpGLSOxydZHXePopz0lhWmK801FERBzj8iIfYI5G4yIS41xb5MdH/DR0eZmrC50iEuNcW+QH2wawVhc6RURcW+S1b91jRWvIRSTGubbIG7q8JMQZSrOTnY4iIuIo1xa5p99HQfpUPWxZRGKea4u8rd9HfrpulCUi4uIiH6YwfarTMUREHOfeIu8LTq2IiMQ6Vxa5d3iMgeExFbmICC4t8rZ+HwCFGZojFxFxZZF7xotcI3IREZcWeZuKXETkBJcW+TCAVq2IiODSIvf0+UhLSiAlKcHpKCIijnNlkWszkIjI21xb5IUZmlYREQHXFvmwLnSKiIybUJEbY/7DGFNjjNltjHnaGJMZqmDvJRCwtPVrV6eIyFsmOiJfDyyw1i4CDgLfmXik99c9NMJYwGrFiojIuAkVubX2JWvt2Pinm4GSiUd6f54+rSEXETlZKOfIPwv88b2+aIy51Riz3RizvaOj46zf5O3NQFq1IiIC8IELsY0xfwIKT/Gl71lrnxn/nu8BY8DD73Uca+06YB1AdXW1Pau0nLQZSKtWRESA0yhya+2l7/d1Y8zNwFpgtbX2rAv6dHn6fRgDeakakYuIwGkU+fsxxqwBvgVcZK0dCk2k99fW5yM3NYmEeFeunBQRCbmJtuHdQBqw3hizyxjzqxBkel9tAz6tWBEROcmERuTW2pmhCnK6PH0+SrKSw/22IiIRy3XzE8HNQJofFxF5i6uK3Dfqp2doVFMrIiIncVWRdwwElx4WaOmhiMgJripyPeJNROTd3FXk49vzNbUiIvI2VxX5W9vzVeQiIm9zXZEnJcSRPk2PeBMReYurivycvFSuXVKMMcbpKCIiEcNVQ9vrV5Rx/Yoyp2OIiEQUV43IRUTk3VTkIiIupyIXEXE5FbmIiMupyEVEXE5FLiLicipyERGXU5GLiLicCcPzkt/9psZ0AI1n8H/JBTonKU4k03nHllg9b4jdcz/T855hrc1754uOFPmZMsZst9ZWO50j3HTesSVWzxti99xDdd6aWhERcTkVuYiIy7mlyNc5HcAhOu/YEqvnDbF77iE5b1fMkYuIyHtzy4hcRETeg4pcRMTlIrrIjTFrjDG1xpjDxphvO50nXIwxpcaYV4wxB4wx+4wxX3Y6UzgZY+KNMW8YY55zOku4GGMyjTFPGmNqxv/eVzmdKRyMMV8d/xnfa4x51BgTlQ/kNcbcb4xpN8bsPem1bGPMemPMofE/s872+BFb5MaYeOA/gSuBKuAGY0yVs6nCZgz4urV2HrAS+D8xdO4AXwYOOB0izH4BvGCtnQssJgbO3xhTDNwBVFtrFwDxwPXOppo0DwBr3vHat4E/W2tnAX8e//ysRGyRAyuAw9baOmvtCPAYcI3DmcLCWttqrd05/vEAwV/qYmdThYcxpgS4CrjX6SzhYoxJBy4E7gOw1o5Ya3udTRU2CcA0Y0wCkAwcczjPpLDWbgC63/HyNcCD4x8/CFx7tseP5CIvBppP+ryFGCmzkxljyoGlwBZnk4TNz4F/AgJOBwmjSqAD+J/xKaV7jTEpToeabNbao8BPgSagFeiz1r7kbKqwKrDWtkJw8Abkn+2BIrnIzSlei6m1ksaYVOB3wFestf1O55lsxpi1QLu1dofTWcIsAVgG/Je1dingZQL/zHaL8Tnha4AKYDqQYoy5ydlU7hTJRd4ClJ70eQlR+s+uUzHGTCFY4g9ba59yOk+YnA981BjTQHAq7RJjzG+cjRQWLUCLtfatf3U9SbDYo92lQL21tsNaOwo8BXzI4Uzh1GaMKQIY/7P9bA8UyUW+DZhljKkwxiQSvAjyB4czhYUxxhCcLz1grf2Z03nCxVr7HWttibW2nODf98vW2qgfoVlrPUCzMWbO+Eurgf0ORgqXJmClMSZ5/Gd+NTFwkfckfwBuHv/4ZuCZsz1QQkjiTAJr7Zgx5nbgRYJXs++31u5zOFa4nA98GthjjNk1/tp3rbXPO5hJJteXgIfHBy11wC0O55l01totxpgngZ0EV2q9QZRu1TfGPApcDOQaY1qAHwI/AR43xnyO4H/U/uGsj68t+iIi7hbJUysiInIaVOQiIi6nIhcRcTkVuYiIy6nIRURcTkUuIuJyKnIREZf7/zFu8n+yeXWjAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "y = torch.log(x)\n", "pl.plot(x.numpy(), y.numpy())" @@ -686,18 +1462,26 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# But what about the GPU?\n", - "How do I use the GPU?\n", - "\n", - "If you have a GPU make sure that the right pytorch is installed\n", - "(check https://pytorch.org/ for details)." + "# But what about GPUs?\n", + "How do I use A GPU?" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 79, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "device(type='cpu')" + ] + }, + "execution_count": 79, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "my_device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n", "my_device" @@ -708,28 +1492,52 @@ "metadata": {}, "source": [ "If you have a GPU you should get something like: \n", - "`device(type='cuda', index=0)`\n", - "\n", - "You can move data to the GPU by doing `.to(device)`." + "`device(type='cuda', index=0)`" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 88, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1., 1., 1., 1., 1.])" + ] + }, + "execution_count": 88, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ + "# you can initialize a tensor in a specfic device\n", "torch.ones(5, device=my_device)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 91, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1., 0., 0.],\n", + " [0., 1., 0.],\n", + " [0., 0., 1.]])" + ] + }, + "execution_count": 91, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "data = torch.eye(3)\n", - "data.to(my_device)" + "# you can move data to the GPU by doing .to(device)\n", + "data = torch.eye(3) # data is on the cpu \n", + "data.to(my_device) # data is moved to my_device" ] }, { @@ -741,9 +1549,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 83, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[2., 0., 0.],\n", + " [0., 2., 0.],\n", + " [0., 0., 2.]])" + ] + }, + "execution_count": 83, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "res = data + data\n", "res" @@ -751,10 +1572,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 87, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "device(type='cpu')" + ] + }, + "execution_count": 87, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ + "# you can get a tensor's device via the .device attribute\n", "res.device" ] }, @@ -764,55 +1597,98 @@ "source": [ "# Automatic differentiation with `autograd`\n", "\n", - "Ref:\n", + "Central to all neural networks in PyTorch is the `autograd` package. \n", + "\n", + "We can say that it is the _true_ power behind PyTorch. The autograd package provides automatic differentiation for all operations on Tensors. 
It is a **define-by-run** framework, which means that your backprop is defined by how your code is run, and that **every single iteration can be different**.\n",
    "\n",
    "Refs:\n",
    "- https://pytorch.org/docs/stable/autograd.html\n",
    "- https://pytorch.org/tutorials/beginner/blitz/autograd_tutorial.html"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`torch.Tensor` is the central class of the package. If you set its attribute `.requires_grad` to `True`, it starts to track all operations on it. When you finish your computation you can call `.backward()` and have all the gradients computed automatically. The gradient for this tensor will be accumulated into the `.grad` attribute."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 140,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(2.)\n"
     ]
    }
   ],
   "source": [
    "x = torch.tensor(2.)\n",
    "print(x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 156,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(2., requires_grad=True)\n"
     ]
    }
   ],
   "source": [
    "# setting requires_grad directly via the tensor's constructor\n",
    "x = torch.tensor(2., requires_grad=True)\n",
    "\n",
    "# or by setting the .requires_grad attribute\n",
    "# you can do this at any moment to track operations on x\n",
    "x.requires_grad = True\n",
    "\n",
    "print(x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 157,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "True\n",
      "None\n"
     ]
    }
   ],
   "source": [
    "print(x.requires_grad)\n",
    "print(x.grad) # no gradient yet"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 158,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Grad of x: None\n"
     ]
    }
   ],
   "source": [
    "# let's perform a simple operation on x\n",
    "y = x ** 2\n",
    "\n",
    "print(\"Grad of x:\", x.grad)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 159,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Grad of y with respect to x: tensor(4.)\n"
     ]
    }
   ],
   "source": [
    "# if you want to compute the derivatives, you can call .backward() on a Tensor\n",
    "y.backward()\n",
    "print(\"Grad of y with respect to x:\", x.grad)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "To stop a tensor from tracking history, you can call `.detach()` to detach it from the computation history, and to prevent future computation from being tracked."
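,
    "\n",
    "For instance, a minimal sketch (the variable names are only illustrative): the detached tensor keeps the same values but no longer requires grad:\n",
    "\n",
    "```python\n",
    "t = torch.tensor(3., requires_grad=True)\n",
    "u = t * 2          # tracked by autograd\n",
    "v = u.detach()     # same values, but cut off from the graph\n",
    "print(u.requires_grad)  # True\n",
    "print(v.requires_grad)  # False\n",
    "```"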
]
  },
  {
   "cell_type": "code",
   "execution_count": 179,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(2., requires_grad=True)\n",
      "tensor(4., grad_fn=<PowBackward0>)\n",
      "tensor(4.)\n",
      "tensor(218.3926, grad_fn=<MulBackward0>)\n",
      "tensor(873.5704)\n"
     ]
    }
   ],
   "source": [
    "x = torch.tensor(2., requires_grad=True)\n",
    "print(x)\n",
    "\n",
    "y = x ** 2\n",
    "print(y)\n",
    "\n",
    "c = y.detach() # c will be treated as a constant! c has the same contents as y but requires_grad=False\n",
    "print(c)\n",
    "\n",
    "z = c * y.exp()\n",
    "print(z)\n",
    "\n",
    "z.backward()\n",
    "print(x.grad)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "To prevent tracking history (and using memory), you can also wrap the code block in `with torch.no_grad():`. This can be particularly helpful when evaluating a model, because the model may have trainable parameters with `requires_grad=True` for which we don’t need the gradients."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 177,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "x: tensor(2., requires_grad=True)\n",
      "y: tensor(4., grad_fn=<PowBackward0>)\n",
      "x: tensor(2., requires_grad=True)\n",
      "y: tensor(8.)\n"
     ]
    }
   ],
   "source": [
    "x = torch.tensor(2.)\n",
    "x.requires_grad = True\n",
    "print('x:', x)\n",
    "\n",
    "y = x ** 2\n",
    "print('y:', y)\n",
    "\n",
    "with torch.no_grad():\n",
    "    y = 2 * y\n",
    "    print('x:', x) # Try to think why x.requires_grad is True\n",
    "    print('y:', y)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "There’s one more class which is very important for the autograd implementation: `Function`.\n",
    "\n",
    "`Tensor` and `Function` are interconnected and build up an acyclic graph that encodes a complete history of the computation. Each tensor has a `.grad_fn` attribute that references the `Function` that created the `Tensor` (except for `Tensor`s created by the user, whose `grad_fn` is `None`).\n",
    "\n",
    "====> Let's go back and see the `grad_fn` in our previous examples."
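,
    "\n",
    "Or, as a standalone minimal sketch (the tensor names here are only illustrative):\n",
    "\n",
    "```python\n",
    "a = torch.ones(2, requires_grad=True)\n",
    "b = (a * 3).sum()\n",
    "print(a.grad_fn)  # None: `a` was created by the user\n",
    "print(b.grad_fn)  # a SumBackward0 object, created by the sum operation\n",
    "```"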
] }, { @@ -889,31 +1808,53 @@ "source": [ "If you still don't believe autograd works, here's something that I think will change your mind --- we're going to compute the derivative of an unnecessarily complicated function:\n", "\n", - "$$ y(x) = \\sum_x e^{0.001 x^2} + \\sin(x^3) \\times \\log(x)$$" + "$$ y(x) = \\sum_{x_i} e^{0.001 x_i^2} + \\sin(x_i^3) \\cdot \\log(x_i)$$" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 137, "metadata": {}, "outputs": [], "source": [ - "x = torch.arange(1, 10, 0.1, dtype=torch.float, requires_grad=True)" + "def complicated_func(X):\n", + " return torch.sum(torch.exp(0.001 * X ** 2) + torch.sin(X ** 3) * torch.log(X))" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def complicated_func(X):\n", - " return torch.sum(torch.exp(0.001 * X ** 2) + torch.sin(X ** 3) * torch.log(X))" + "execution_count": 138, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1.0000, 1.1000, 1.2000, 1.3000, 1.4000, 1.5000, 1.6000, 1.7000, 1.8000,\n", + " 1.9000, 2.0000, 2.1000, 2.2000, 2.3000, 2.4000, 2.5000, 2.6000, 2.7000,\n", + " 2.8000, 2.9000, 3.0000, 3.1000, 3.2000, 3.3000, 3.4000, 3.5000, 3.6000,\n", + " 3.7000, 3.8000, 3.9000, 4.0000, 4.1000, 4.2000, 4.3000, 4.4000, 4.5000,\n", + " 4.6000, 4.7000, 4.8000, 4.9000, 5.0000, 5.1000, 5.2000, 5.3000, 5.4000,\n", + " 5.5000, 5.6000, 5.7000, 5.8000, 5.9000, 6.0000, 6.1000, 6.2000, 6.3000,\n", + " 6.4000, 6.5000, 6.6000, 6.7000, 6.8000, 6.9000, 7.0000, 7.1000, 7.2000,\n", + " 7.3000, 7.4000, 7.5000, 7.6000, 7.7000, 7.8000, 7.9000, 8.0000, 8.1000,\n", + " 8.2000, 8.3000, 8.4000, 8.5000, 8.6000, 8.7000, 8.8000, 8.9000, 9.0000,\n", + " 9.1000, 9.2000, 9.3000, 9.4000, 9.5000, 9.6000, 9.7000, 9.8000, 9.9000],\n", + " requires_grad=True)" + ] + }, + "execution_count": 138, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x = torch.arange(1, 10, 0.1, dtype=torch.float, requires_grad=True)\n", + "x" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 139, "metadata": {}, "outputs": [], "source": [ @@ -923,20 +1864,150 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 135, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 8.4347e-01, 9.6745e-01, 7.0215e-01, -1.5371e-01, -1.5448e+00,\n", + " -2.8139e+00, -2.5935e+00, 3.4371e-01, 4.9029e+00, 6.1208e+00,\n", + " -7.1154e-01, -9.6026e+00, -4.3225e+00, 1.2014e+01, 5.0618e+00,\n", + " -1.7083e+01, 5.3129e+00, 1.4884e+01, -2.4178e+01, 1.9542e+01,\n", + " -8.3407e+00, -2.0807e+00, 8.0647e+00, -7.7079e+00, -1.1458e+00,\n", + " 2.0335e+01, -4.4317e+01, 4.9858e+01, -6.3655e+00, -5.7770e+01,\n", + " 2.6313e+01, 6.9782e+01, 1.9344e+01, -4.6108e+01, -8.0576e+01,\n", + " -9.1352e+01, -9.6715e+01, -1.0142e+02, -8.7320e+01, -1.8511e+01,\n", + " 9.4970e+01, 9.7035e+01, -9.6455e+01, -4.8176e+01, 1.3684e+02,\n", + " -1.5338e+02, 1.5415e+02, -1.6741e+02, 1.6771e+02, -7.1524e+01,\n", + " -1.3881e+02, 1.4273e+02, 1.9093e+02, 6.2615e+01, -4.0747e+01,\n", + " -6.2126e+01, 9.7839e+00, 1.7279e+02, 2.5614e+02, -5.8047e+01,\n", + " -2.4144e+02, 2.8856e+02, -2.5302e+02, 2.7245e+02, -3.2850e+02,\n", + " 2.1105e+02, 2.3265e+02, -1.9564e+02, -3.6942e+02, -3.7990e+02,\n", + " -3.9796e+02, -3.5900e+02, 7.7348e+00, 4.3732e+02, -2.2126e+02,\n", + " -2.6230e+01, 5.5592e+01, 1.6339e+02, -4.8904e+02, 1.6288e+02,\n", + " 5.2793e+02, 5.0286e+02, 5.1288e+02, 5.7520e+02, 
2.1343e+02,\n", + " -5.8578e+02, 2.3057e+02, -2.5720e+01, 1.8482e+02, -6.0595e+02])" + ] + }, + "execution_count": 135, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x.grad" + ] + }, + { + "cell_type": "code", + "execution_count": 134, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 134, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD4CAYAAAAEhuazAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO29eZhcZ3Wv+66ae27Nas2yPMoSnoQHjM1gwGa0k5wEhwAOgTjhkIQkJ+Hi5CT3JgfncrgJUxLgGAjYYGJ8iC8mXDAemMGWLQ/YlmTZsuahpZbUc3fN3/1j769qV9Wu6qquXepu9XqfR09379pV9anUWnvt3/p9a4kxBkVRFGV+EZrpBSiKoiinHw3+iqIo8xAN/oqiKPMQDf6KoijzEA3+iqIo85DITC+gXhYvXmzWrVs308tQFEWZUzz55JMnjDFLyo/PmeC/bt06tm3bNtPLUBRFmVOIyH6/4yr7KIqizEMCCf4i0isi3xKRF0Rkp4hcJSILReQhEXnJ/brAc/5tIrJbRHaJyPVBrEFRFEWpn6Ay/88ADxhjzgcuAnYCHwUeMcacAzzi/oyIbARuBi4EbgA+JyLhgNahKIqi1EHTwV9EuoFrgS8DGGPSxpgh4EbgTve0O4Gb3O9vBO4xxqSMMXuB3cDlza5DURRFqZ8gMv+zgAHgKyLytIh8SUQ6gGXGmKMA7tel7vkrgYOe5x9yj1UgIreKyDYR2TYwMBDAUhVFURQIJvhHgEuBzxtjLgHGcSWeKojPMd/ucsaYO4wxW4wxW5YsqXAqKYqiKNMkiOB/CDhkjNnq/vwtnIvBMRHpA3C/Hvecv9rz/FXAkQDWoSiKotRJ08HfGNMPHBSR89xD1wE7gO8At7jHbgHud7//DnCziMRFZD1wDvB4s+tQFEWZaR54vp8jQ5MzvYy6CMrt88fA3SLyLHAx8A/Ax4E3ishLwBvdnzHGbAfuxblAPAB8yBiTC2gdiqIoM8LhoUn+8OtPcvdW3z1Vs45AdvgaY54Btvg8dF2V828Hbg/ivRVFUWYDDzzfD8DIZHaGV1IfusNXURQlAL7/3FEARpOZGV5JfWjwVxRFaZL+4STb9g8CMJbSzF9RFGVe8IPtjuSzrDvOSHJuBP8509VTURRltvK9545y7rJO1ixs58hQcqaXUxea+SuKojTBwGiKx/ed4s2b+uhKRBlNqeavKIoy55lM5/jMwy8xOJ72ffwH2/sxBt6yuY/OeISxOSL7aPBXFEWpwZP7B/nUwy/yoW88RSaXr3j8+88f5awlHZy7rJOuRITRZBZjfDvWzCo0+CuKotRgxLVu/vLlk9z+/+0seezkWIrH9pziLZv6EBE6ExGyeUMqW3mRmG1o8FcURamBlXHevGk5X/3lPr75xAGMMfxi9wlu/dqT5PKGN29eDkBXIgoULxizGXX7KIqi1GDU9e1/7KZNjKWy/PdvP883Hj/Irw4Osaw7zj/82mYuXNEDQFfcCaljySxLu2ZsyXWhwV9RFKUGNvPvaYvyz799Cb/++V8yMJLkf9y0id+8bBWJaHEQYVfCCamjc6Doq8FfURSlBqPJDO2xMJFwiN72GA98+FrCISEcqhxN0mkz/zmwy1eDv6IoSg3GUtlCUAeIRaqXSjsLmf/s1/y14KsoilKD0VS2ENSnotst+M4F2UeDv6IoSg1Gk9mCi2cq5pLso8FfURSlBmPJTMHFMxWdc6jgq8FfURSlBuWafy2i4RCJaEgzf0VRlLmOI/vU743pSkS14KsoijLXGUvWX/AFZ6OXyj6KoihzmHzeMJbO1q35A4XmbrMdDf6KosxpjDE8vOOYb8fNZhlPZzGGut0+4BR9VfNXFEVpMbuOjfKBu7bx0xcHAn9tG8Qbk31U81cURWk5wxNOoG1Ftm37+tTr9gE381fZR1EUpbWMp51A24oe+nYYe2Nun3mm+YtIWESeFpHvuj8vFJGHROQl9+sCz7m3ichuEdklItcHtQZFUeYf46kc0Jrgb+8mGgr+8Qhj6Sz5/Oye5hVk5v9hwDvm5qPAI8aYc4BH3J8RkY3AzcCFwA3A50QkjKIoyjSYsJl/Jhf4axdln8YKvsYU70hmK4EEfxFZBbwV+JLn8I3Ane73dwI3eY7fY4xJGWP2AruBy4NYh6Io849WZv62cNvoJi+Y/f19gsr8Pw18BPB++suMMUcB3K9L3eMrgYOe8w65xxRFURpmooWa/3TcPrY4PNt1/6aDv4i8DThujHmy3qf4HPMVx0TkVhHZJiLbBgaCt3EpijL3GStk/sHLPjaAd8QaK/h6nztbCSLzvxp4h4jsA+4BXi8iXweOiUgfgPv1uHv+IWC15/mrgCN+L2yMucMYs8UYs2XJkiUBLFVRlDONoubfCtnHaermN7WrGjb4n/GyjzHmNmPMKmPMOpxC7g+NMe8GvgPc4p52C3C/+/13gJtFJC4i64FzgMebXYeiKPOT1rp9Mg15/KGo+c/2jV6tHOP4ceBeEXk/cAD4TQBjzHYRuRfYAWSBDxljgr9fUxRlXlDU/Fvg9mlgipelMNBllss+gQZ/Y8yPgR+7358Erqty3u3A7UG+t6Io85PxdCvdPo21c4b5pfkriqLMGBOp1mv+jdARiyDizP6dzWjwVxRlTmMLq+kWdPUcSzWe+YdCQmcsMus1fw3+iqLMaSas7NOiHb5dDezutcyF5m4a/BVFmdO0cpPXaDLTcMEX5kZzNw3+iqLMaVpl9czlDePpXMOaPziOnzPe568oijJT5PKGyUxrdvjaxmyNav4AnXNgiLsGf0VR5iwTns6ZQbt9RqfRy9/SlYio20dRFKVV2GJvSIKXfabTztnSFVfNX1EUpWWMu9n1gvZY4LLPWKrxds6WLnX7KIqitA6b+S/oiAWe+dsRjtNx+3TGo0xmcmRasPcgKDT4K4oyZ7GZ/8L2GOlsHmOCG51oM/euabh97N3C+CzW/TX4K4oyZ7GOnAUdji4fZPZfnN87vU1eMLv7+2jwVxSlKrm84WuP7iPdgg1UQWA9/gs7YkCwwd9aNacj+3R
r8FcUZS7z9IFB/ub+7fzy5RMzvRRfrNVzQbsN/sEVfceSWUSgPRpu+LnWITSbN3pp8FcUpSq2XfJkenaO3KjI/AP0+o+mnI6eoQameFmKbZ1n70YvDf6KolQl6e6enfRpmvbisVF+/65tLRmiUi+VmX+Qsk92WsVeKEpFmvkrijInqRX8t+49xUM7jtE/nDzdyyowns4RC4cKwTZo2Wc6ej8UHUIjqvkrijKbMMbU5UG3ck/SR05J1njsdDGeytIeDxOPOKEsaLfPdJw+UHQIzeaNXhr8FWUecvfWA1z1f/+QXL62L95m/EmfzL/WY6eL8VSOjliEeMQpygaq+ScbH95uSURDhEOimr+iKLOLB3cc48RYipHJ2sHJBni/gm+xm+bMZf4T6SztsTCxQuYf3IVodBrD2y0i4rR4UM1fUZTZQjaX58l9pwAYniL4F6Udn+Bf47HTxXg6R0c8UpB9gtyPMJbMFvz606Fzljd30+CvKPOM7UdGChbOkSlkickaBV+bZdcT/E+NpwNtvWCZSGXpiIdJRIPX/KczvN1LVyKqwV9RlNnD1r0nC99PlfnXCv42858q4N7/zGEu+9hDPLjjWKNLnZKxVJZ2r+YfUPDP5vJMZnLTaudscdo6+3++dz26jzt++vK0XzsINPgryjxj655TBY18StnHLaD6FVLrKfg+uL2fP7/3VxgDx0aCt4ROpHN0xLxun2AkKLt5bDrtnC21NP/vPXeU+546PO3XDgIN/ooyj8jlDY/vO8XVGxYBMDJZW5aomfm7F4RklWz75y+d4I++8TQX9HUBrdnwNJHO0h4P3u0z0kRfH0tnjSHumZzh1Hh62q8dBE0HfxFZLSI/EpGdIrJdRD7sHl8oIg+JyEvu1wWe59wmIrtFZJeIXN/sGhRFqY8X+kcYTWZ548blQP0FXz+3j30s5XNheO7QML9/1zbOWtLB199/BSGBiVTwhWHH6hkmHrDmX+jo2ZTmXz3zz+TyDE60pg5SL0Fk/lngvxljLgCuBD4kIhuBjwKPGGPOAR5xf8Z97GbgQuAG4HMi0njnJEVRGmbrHsfl89rzlhANS90F36SPnFLL6vntZw6TM4a73n85ve0xOmKRQvvloLDD2zviEWLhYGWfZto5WxKRcFVJLJ3Nk8mZGZ3z23TwN8YcNcY85X4/CuwEVgI3Ane6p90J3OR+fyNwjzEmZYzZC+wGLm92HYqiTM3WvSdZvbCNFb1t9LRF6y/41vD5+wW4iXSO7kSUpV0JADrikcAzf/v+HTGn+VosHAos82+mnbMlGglV3UWddo8PzqD0E6jmLyLrgEuArcAyY8xRcC4QwFL3tJXAQc/TDrnH/F7vVhHZJiLbBgYGglyqoswpTo2nmx4JmM8bHt97iivWO3p/dz3Bv4ajp5bPP5XJ0RYrhpf2eDjwzN9OyWqPO8JBPBIKTPMfLQxvn37wj4VDZHKGvM8uarsf4eSZEPxFpBP4D+BPjTEjtU71OeYrfBlj7jDGbDHGbFmyZEkQy1SUOcep8TTXfuJHfPUX+5p6nd0DYwxOZLhi/UIAuhPRKXf4Jmtk/skass9kJkciUlRzO2KRwEca2tfriDkBOh4NBS77NLPJyzqqMvnKzydTI/MfT2VPy+zfQIK/iERxAv/dxpj73MPHRKTPfbwPOO4ePwSs9jx9FXAkiHUoypnIPU8cYCyV5VeHhpp6na17HH+/zfx72qYO/rXdPrX7/rTFisG/PRYubCwLCju8vT1mM/9wcAXfJoa3W2wdwm/XcSbn5Lt+mf9/+cKjfPKhF6f9vvUShNtHgC8DO40xn/Q89B3gFvf7W4D7PcdvFpG4iKwHzgEeb3YdinImks3l+fqj+wHYfXysqdd6bO8p+noSrF7YBlCX5m99/uUB3hjjCf4+HT8zORKeCVid8Uih935Q2MzfSjPxSJCaf5ZwSGibxhQvSzTsiBw20HuxF4TyzD+fN7x0bPS0tMme/mWtyNXAe4DnROQZ99hfAR8H7hWR9wMHgN8EMMZsF5F7gR04TqEPGWNm55ggRQmYk2Mp0rk8fT1tdZ3/8M7jHBlOsmFJB3tOjJPLG8LTmCwF8NT+QV65biFOvgbdbZEp+817HT35vClMtUpl81iXop/UMpnJ09tWdMq0xyNMnGhR5u8G/1gk5Gs7nQ5j7hQv+1lNh5gre/ll/rbgW+71H5xIk82b0zIzOQi3z8+NMWKMeYUx5mL3z/eMMSeNMdcZY85xv57yPOd2Y8wGY8x5xpjvN7sGRZkr/P13d/CHX3uy7vPv/OU+Vva28f5Xn0U6m+fQ4MS03jeZyXF0OMnZSzsLx2zmX81rnnODkJVVvHZP751AtV7/tt8OQEcsHPgmr7GC5u/KPtHGZJ99J8b55hMHfB9rtq8PeDP/0jUZUwzu5cF/YCwFFC8OrUR3+CrKaaR/OMnhocm6zt3VP8qje07y7ivXct5yJ2i/PDA96eeI+56rFhTvOHraouTypqoWbwO8HZHoDfKTGf8Lgfdxr2TSHosUMvWgsDJSe4nsU/97fOnne/jofc/5unHG3YZxzRCrMmAm63m/wYnS4H98xA3+cyHzVxSllHu3HeTNn/mZ72MjySynxtNTDlEBp/lXPBLi5leuZsMSJ/hPV/c/NGiDf3vhWLe7gala0dcG+AUd0ZKfodT949feIVlW8O1wrZ5B7mi1/XcKmX+Dmv+OIyMY459lp7KlNYvpYPsNlWf+3p/LC77HRzX4K8qc5fnDw+w8OuKbEY8mM+QNDE3U9ncPT2a476nDvOOiFSzoiNHbHmNxZ6zp4G+LveBk/va9/LAB3mb+3oBvLwQi/u0dJjO5Qr8dcDZ5GRPsyMdC5h/zZP51vn4ub9h5dBTwt6qmsvlC8J4u0SpuH+/P5QXf46NOoXfOWD0VZT5Szb1ig6lf6wSbZU+1uecH2/uZzOR471XrCsc2LOlsIvhPEA1LYcctOJu8vOstp1L2qZR6uhNR3+BZkfm73we50csOb7fyimP1rE/22Xti3FPM9ptVkC+5eE0Hu67yOwv7cywcqsz8R1TzV5RZzU9eHOCSv3+IE26Bzksh+Jd1zMznTaFI6fc8L9bqd77bERPg7KWdvDwwPi3p5NDgJCt620qcQjbzn0r2WdhRGfwn005wWtAerbjDyeacvjXlmj8Q6EYvO7zdEo+E6g6aO44W96H63S2ksrnAMv9Mlcx/aXec0WTphq4BlX0UZXaz48gIqWy+UEj1Ui3zH09nsVL/ybHamf/gRJqueKQQQMAJ/sOTGU5M8Vw/Dg1OlBR7YWrZx0o0ve0+mr/7fU97rCLztzWAErePG6THA+zvY4e3W+LR+mWf7UeGC9/7Zv6ZfKFT6HQpFHwrNH/nl2B5t3MX5i36WtlHM39FmaX0DztBf2iiMnAWM//Sx7ye+ql6uQ9PZOhpL+0o2UzR9+DgJKt620uO2YJvVc2/IvOvdPv4Zf62NuCX+Qe50csOb7c0ssN3x5Fi5u9XhwhE9pki81/mBn/v74Jm/ooyyzniyjJDPoHTBv3yoOod6XdyCtlncC
Jd0Not1qO/u0G7ZzKTY2A0VZH5dyUiiFB1o5cN4r224OvV/D3F4GQmVyJF2YuB1y3T4doxg2zxMJ7OFWyeUL/V0xjDjiMjLOuOA9UKvs3LPtU0fyvz+AV/6/bRgq+izFKsJj9c5toxxnhkn9Kg6q0BnJgi8x+cyBTkFktfT4KOWJiXG8z87b6CVQtLg38oJHTFI1U1/6Qnu4diwAeP7NMWJW9KWxjY55VbPcEZuB4UE6lsoZAMRavnVDWR46MpTo6nuWS1M1+qesE3IM2/SsF3eY9z8bHBfyyVLeyF0MxfUWYpR23mXyb7TGZyhUBYHlQbyfyHJzOFjNsiImxY2tnwRi8/j7+lu0Zzt8lyt0+2Mvjbx1I+j5V39YRgRzmOp3OFOwpwdvga499Lx4vV+y9d2wtUyfwzeeJN+vwLmf8Uso+1ex53Zxwv7IhN+XcIgiB6+yjKnObUeJq9J8Y5PpLk2EiSs5Z0cu251VuIp7K5glunXPbxSj3lBV/784qeRF0F3wXtlVOkzl7SyaNud856sS0hymUfqN3czco+VvMv8fkXJCH3riCTx7pIrYZe3tUTCHSX77hP5g/Ov0+sRta+/bCj91+0yg3+ZZq/MSYgt4/jrEqXBXJ7J2Btt9buaSWfVQvaeMHdg9BKNPgr85qJdJZr/ucPS7ToJV1xnvjrN1R9jvViQ2XmXxL8ywu+ruyzbnFH4c7Bj3zekY68jdEsG5Z2ct/ThwuNx+rh0OBkhcffUjP4l2X35S0d4pFQoaib9HEC+Wv+ARd8yzR/cDL5rmpPwrF5rlvUzqJOq/mXWVXzhryh6eAfD/s3drM/t8fCdCcixczfE/yfPTSMMaapxnJTobKPMq/Z1T/KeDrHX7zpXL73J9fw+9es58RYqqbm6g3cw5OlGfzwhDf4lwY6K/usXdRR0+c/ksxgDBWyDxQdP43o/n4ef0t3Ilp1jm8yk0PEsWzGIqESV4zdxOU3ON3eFXitnvFIiHBIAh3laIe3F98jXLEWP7YfGWHjiu6Si4UX+3NQm7yqtXeIRUIs6oxzyv2dsbLPyt4297zWSj8a/JV5zQv9zu31jRevZOOKbjYs6cSYot/aj6OuzXNpV7xq5h+LhHxknyzxSIgVPQlGk9mqFxj7muUFXyg6fhrR/Q8NTrDaR++H2pl/0m3OJuL0tS/P7tui4UJ2733MZtJeq6eIuANdgsn87fD29jKfP/i3m7CMJDMcODXBhSt6fC9c3ucnmvT5F2Sf8szfDerRcIgF7VFOjTuJwMBYilgkxJKuuHtea4u+GvyVec0LR0fojEcK2dayHkcaOTZSK/g7j13Q111V81+9oK0iqI5MZuhuixbkhmpef7vpp9zqCbB2UTuRkDTk9T80OOmr9wP0tNeWfWwAT0RDZb198iXBv6Tgm650+0CwoxwLw9vjfpp/9aC50/X3b+zrLq697GIRVOYfCYcISXXZJxYOsbAjzqlx5/MfGEmxpDNedX9A0GjwV2YFf/7NZ07L6LpydvaPct7yrsKQErvrsn+4uizTP5ykKx5h5YK2qpn/6oXtPm6fLN2JCIs6naBeTfqplflHwyHWLmqvO/hX8/hbuhMRkpm8/0CWdL4QINui4VK3T9rpelkIuD4bwBJlwdMZ4h6M7FOY31ui+U8t+9i2DnXJPk1m/uD8e9WSfRZ2FDP/46MplnTFiVbZHxA0GvyVGeepA4Pc9/RhvvjTPVX151ZgjOGFoyOcv7xYHiwE/5qZ/yTLexL0tkUZnkyX+MpHJjOIOLpthc8/maErEWWxG/yrNXcbcusIfpo/ONJPvRu9atk8wdvfpzIjd0YxOiEiEQ2XtnF2Nf+C7ONj9fTL/IPy+ZcPbwdP5l9D9tl+ZITFnTGWdhUz7Mrgnyt5vWaI+bSZtpl/NCws7IgzOO4M1Dk+mixZV6u9/hr8lRnnCz9+2ZEVMjm+/fTh0/a+R4eTjCSznN/XXTjW2x4lFgnVlH36h5P09bbR2x4lkzMl9sXhyQxd8Qi97Y5/vuTCkMw6sk+HI/tU8/oPujKAn9UT4Lzl3ew7MV4SjKtRy+YJtTt7eoewJ6Lhkr79Rc3fCSGlxWArm5SGl44AM//y4e1AVQ3fy44jI1zQ142IICK+u4JTmWBkH3Cknaky/3Quz3g6x/HRFEu741V3BgeNBn8lUPYMjPG1R/fVff5Lx0Z5cMcxbr12A5tX9nD3YwcCHfhRixf6HQngAk/mLyIs707UHKB9ZDhJX3eC3jYnM/fq/sOTTk+e7kSUbL70wjA6maErEWGhzfyreP2H3LuHroR/8N+0opu8Ke1MWY2pMn8b/P3uuCbTRc2/LRou3eFbkH0qC77eQrGXIDX/6cg+46ksLx4bZfPKHs9zKpvBFTX/YDL/yoKvzfxDhbpO//AkQxMZlnYlNPNX5iZf/vle/ub+7ezqr2+Tyhd+soe2aJjffdU6fueKNew6NsqT+wdbvEoHO8zj3OWlrvDl3Ymqsk86m+fEWIrlPYlC4zXvYJbhyQw9bdGinJIs3fTVnYjSFY8QC4c4MV5N80/T0xatOqh98yoneD1/eNj3cS9Fj3/c9/FanT0nM8VpVvbOzFKUffytnn5OmfZ4cKMcfTN/zyYvP7btHySbN1x51qLic3zm/hZknxZp/t6Cr63/7Op3ZLylXfGqO4ODRoO/EihPHxgC4L6nD0157uGhSe5/5jA3X76ahR0x3n7RCjrjEb6x1X+odtDsPDrCqgVthe6WlmU9iaqyz/HRJMY4fXbsJiyvt98G/24fLd2RfSKICIs6Y1Uz/8EJ/w1eluXdCRZ1xOoM/hOs7G0rFLTLqdXTP+lx+7TF/KyeoUJR1y/zL6cjFg4u809XZv4xn+Kzl8f2nCQSEi5bu6BwrOWyj8+MgUwuTyQkhEJSyPx3uXehS7vjVXsCBY0GfyUwJtJZdh1zsun7nz4y5ZzaL/50DwAfuOYswPmP/GuXrOS7zx2tGG/XCl7oH+UCj95vWd4dp3846Ss/WTnI0fyryD5t0eJ8XDfzT2ZypLP5wvFFnbGqVs+hiXTVYi840tSmlT08V2fmX03ygdpzfJNlmn95P/+2aHGTV3m750TMJ/gHmPlPR/Z5bM9JXrGqp+w5lQXZIGWfaDhEOlv6e5TO5gsB3tZ/7H6TpV0JzfyVucdzh4bJ5Q2/fulK+keSPPpy9R40g+Np7nniADdevLLgsQd41xVrSGfz/MdTU985NEMyk2PPwFiJ3m9Z1p0glc37SiG2lXNfT6JgxRwqyfyzbubvBBh7VzDqOn+6E87xRR3xqgXfoYlM1WKvZdPKbl46PuY7J9hLLY8/TC37FH3+ZZl/2gnwiUilzz+ZyVXYPMHN/AMa4l4+vB1qyz7jqSzPHRoukXyc54R9NP9c4bFm8c/8TSHAL+hwPn+bNJXIPpr5K83ys5cGCsXNINh3YpxPPfRixX+yZw46ks9fvOk8uuKRmtLPT14cIJnJc8ur1pYcv6Cvm0vX9PKNra0t/O4+PkbeUOL0sSzvqW73tENclvckCoHTWjONMYWNXOWav/1q5aBFn
bGqE7mGJmtn/gCbV/aQy5tCxuhHMuM0oKsV/GNufx7f4J/Olfr83SCZzxtSWWeTVygkxMLlrR/yFTZPcDR/Y0p7BE2X8uHtUNTo/TLmJ330fvucCtknQJ9/LCwVm7VSnsy/Mx4hGhb2n5xAxGmid8YXfEXkBhHZJSK7ReSjM7WO2UY2l2f38dEpJRMvR4Ym+Yfv7SzIKF6OjyZ5/53beNcXt9a0L9bLocEJ3vXFx/jMIy/xw53HSx57+sAQaxe1s6K3jbds7uOB5/urTm7auvck3YkIF67oqXjs5svXsOfEOL86NLWsMV12uk6Z830y/+JGr8rP6+hwks54hO5ElIRrdbTZfTKTJ53Ll8o+ZVO9utzMf3FnnJPjKd8L3NB4ZS//cja5jpVa0o91+qxeWF32Aehui1Tx+Zdu8pp0h7Yky9o3xKMh39YP5RSGuAfQ32c8nSMalpLundV8++Cv94O/7GP/LoG5fXw0/5jb+kFECl1TF3XEiXgG0p+Rmb+IhIF/Bd4MbAR+W0Q2zsRagsQYw6nxNPtOjPPcoWEefflkSTFwKk6MpfidL23lDZ/8KVf8wyP87f3P8/jeU+SrXAh2Hx/jL/73r7j2Ez/ijp/u4R++v5MXj5Vmgl/5xT4yuTyT6Rx/es8zVS8q2Vyebz99mC/9bA/ZKr90x0eS/M6XtjKWytKViPCD7f0ljz9zcIiLVzttcn/t0pVMpHM8uP2Y72tt3XuKV65b6OtoedPGZYRDwkM7+n2eGQwv9I+SiIZYu6ij4jHbZ93vYtk/nCzcGQD0tsUKso/NnnvaooUgP+wG1aLs42b+HTGSmXyFBp7J5RlNZQs20mqsdPcZbK8R/A+emiicWwu//j7ZnHMh87Z3yOUNmZypaN/gjE8st4H6uH0CHOU4kcqWZP3gtFOIhMRX9vHT+4trb01jN6ju83kgAiYAACAASURBVPdetGzR1zqyYqep4DtTLZ0vB3YbY/YAiMg9wI3AjqDf6IHnjzI0kSFvwGAwBsIhIRwSIiEhHgnT7u5U7IxHCja9rkSkqkMCnFvfoyNJ9p0Y59lDwzy5f5BnDg5W3MpHw8Krz17MWzb38aYLlxfkgHKePTTEH3ztSU6Np/nzN57LzqMjfPOJg9z16H7eftEKPv3Oi0sC5Q9fOMatdz1JJCy8+8q1/JfLVnHzHY/xyQdf5AvvuQxwpIavP7qft2zq4zXnLeEj33qWz/94N3/0+nMKr5PN5bn/mSP8y492s/fEOACP7DzOZ3/7kkKDKXD60Lz7y1sZGE3x9Q9cwTe2HuAH2/tJZ51f5KPDk/SPJLnEDf6Xr1vIyt427nv6MDddsrLk7zowmmLPwDjv3LLa97PobY9x+bqFPLj9GH95/flV/w2a4YX+Ec5b1uV78VlWo8XDkeEkfd7g3x4tyD7e4B8Jh+iMRypkH+vdt9neybF0SUCyr2G14GqICJunKPo+tvck0bD4Slte/Dp72g1dbbHiDl/neK6ifUOibHB6MluUi7wU2joHkPmPlXX0tPj59sdTWZ49NMyt155V5Xx/t0+tmQD14hR8K62eVvYBCnbPpe5YyehpKvjOVPBfCRz0/HwIuKIVb/SPD744rYHXItAVj9Dt3sLHIiHyxpA3hmQmz8FTEyUZw/rFHbzm3KVcuKKb3vYoXe5zfrn7BN999ig/2vUsf3P/8/zGpat439XrOHtpF8YYXjw2xsM7j/GZR15iSWec//jgqwq39GOpLF/62R4+/fBLdCUi3H7TJkSEJ/ef4r/e/RQX9HXzlfe9ksVuo7APXLOeTz/8Er86OMRFqx3dfDSV5Q9fs4FNK7v5xe4TfOrhl9iybiEAD+04xgPP93N4aJIL+rr5wrsvZSSZ5W++/Txv++ef8al3Xkwyk+OB5/t5aMcxJtI5vvK+V3LpmgWcHEvzrScP8diek1x77hKecS2eF69xbqtDIeGmS1bw+R+/zPGRJEu7iwHziX2nALh8/cKqn/+bLlzG3/3nDvYMjHGW28Y4KIwx7Dw6yhsvWOb7eCwSYlFHrKrmf96y4qCXnraob+YPTnG3KPu4mX9bUfYBODGeYs2ioixj9wxMpfkDXLiihy//fI87eKQyEP5k1wBb1i6csu9/T1u04u9aPoTdZvnJdK44o7dk92/pfF9f2ceOcgwg8x91W2WU4+fbr6b32/PLg2wq60hK1fZZNILfJq96M//yITBBM1PB3+9TrfibisitwK0Aa9asmdYbfeMDV5AzBkGw/5Y5Y8jlDdmcU7iazOSYSGcZS2YZnswU/oxMZhh1j2XyhpBAWIRoOMTrzlvCusUdrF/Uwfl93YVMrpzXnLuEj775fJ45OMQ9jx/kfz95iLu3HuCSNb0cPDVRuFN49dmL+czNFxc6PoJTDPrTN5xLKpvn8z9+mUUdMd72ihW87ytP0NfTVhL4Ad7/6vXc+ct9/OODu/jie7fw5Z/v5dVnLy5sCvrYTZt45uAQN9/xGOD8Yr5qwyL+9u0beeMFywp3OptW9PDBu5/kXV/cCjgXwddfsJT3XrWuoJlec85i2mNhfrC9n2vPXcLTB4eIRUJs9GSZv3bJKv71Ry/z7WcOc+u1GwrHH997ivZYuHCR8+ONG53g/9COY/zBa4IN/gOjKU6Npzm/r/rIj+U+Xv9MLs/x0RTLe4oySm97lH0nHHmlIvi3FTNq28vfa/WEyl2+haZuNXz+ls0re8jkDC/2jxX+jS39w0le6B/ltjdPfefU0xYtuE0s5UPYbZY/mckxmXbvCqzmX9br39sWwkt7gKMcx1JZOhOV4cvPt19N7y+eXyn7BCH5gBPIy7X7VFnmb2OHHbZzugq+MxX8DwHee/5VwJHyk4wxdwB3AGzZsmVal0FvxjlTiAiXrFnAJWsW8JEbzuPfHz/AA9v7ueacJVy1YRFXnbWoZlHuI9efx+B4mn/+4W6++ot9tMXC3PV7l5cEfnAkhf/62rO5/Xs7+av7nmNgNMWn33lxyeP/6z2X8fXH9nP1hsVce+6SCg0UnI6H//nHr+beJw6yYWknr9qwqOI/QyIa5jXnLuGhHcf4Hzdu4ukDg1y4orskozl7aSeXrV3APY8f5PevOauw3X/r3lNctnZByX+AclYtaOfCFd08uOMYf/CaDVXPmw47XYfM+curyyHLuxMV07YGRlOFDV6W3rYYQ5POXU9l5l/U0keSGcIhKexILbZ1LpWWBidsX5+pM//NnqJvefD/yYtOMf4151UfR2nxm+NbPoS9kPln8sXGbR4baPkMX3/ZJ7hRjmOprG/C5RfMq+n9xfPL3T7Nj3C0VM38/YK/K/uc6T7/J4BzRGS9iMSAm4HvzNBaTiuLOuP80evP4bt/fA2feufF/NaW1VO6MUSE239tM2/d3Ec4LNz1/surPuc9V61lWXec+54+zOaVPbxqQ+mt7vnLu/nYTZt58+Y+3/8Mlu5ElA9ccxavO29p1Szo+guXc3w0xbb9gzx3eJhLVldmVr/tOne27nWknuGJDC/0j3D5uuqSj+VNG5fz1IFBBkZrDztvFLsz1s/pY/Hb
5WuHuFRo/tVkn7ZoQe4ZTTpFcnsBXNRh2zqXZ/7pwutOxeqFbXQnIjx/pFL3//GuAZZ3JzhvWa2BhhTWOZrKlhgLygO8/TqZyXm6dtp6QDHzN64s6hv8Y1bzDyDzT/qPsSz37Vu930/y8Tsf3OHtAQV//5bOpiRJKmb+pcH/jNzha4zJAn8E/ADYCdxrjNk+E2uZK4RDwr+86xIeu+26mhlrIhrmT65zCroffO2Gls4Afd35S4mEhM8+8hLJTJ6L1/RWnPPWzX10JSL8++NOy4Yn9p3CmNp6v+VNFy7DGHhkp79jaLr8eNdxNvZ1s6CKVAdO5n9yPF2SFR4tbPAqyj497VFS2TzJTK4Q/K0W3d3mKfi6Td0s1mBQVfapI/jbnb7lbR4yuTw/f+kErz1vSV3//j1tUYwpOpKgUvO3nvfJdM4zprHo9rF3Cjbr9tf8rdun+cx/xL2YllPu23/qQHW9v3i+j+zjs/7p4NvYLZsvTPmC4i7fJa7sEw6J7xCYoJkxn78x5nvGmHONMRuMMbfP1DrmEiLim1GV867L13D/h67mzZuWt3Q9PW1RrtqwiJ/vPgFQcPp4aYuF+fVLVvL95/oZHE/z+L5TxCIhLvI5t5zzl3exemEbD+4ILvgPjqd5cv8gb7hgac3zrNffO6zd+v7LrZ7gBG0b4G2hsDtRlFNGktmKHkKLOmOcrJB90kRCUvdw9k0re3jh6GhJoHj6wBCjqSyvOXdqycdZZ8RdY1H6sdl9vCzzT2aLBV+vDdQGUL/5vRYreQUxynEsVaXgWyb77HMdbNXqO3HXh++96wlS9nEy/1LFurzg+/rzl/I3b9tYsEmDc9E4IzN/pbWICBet7m1p1m+5/kLnArO4M1Z1J+nNl68hnctz39OH2br3FBev7q3rIiYivGnjcn6++0QgRUJwdhbnDby+itPHssxnl++RoSTtsXAhWEIxQx+aTBf6+li8csqo29HTy8KOyuZuQ5POBq96/+02rewhncuX7O/48a7jRELC1ecsrus1/Fo8lAd4r9unfFhLwpP5l28A82KHuDcr+2RyeZKZfHXZxxP8+0eShEPC4g7/rqZW0vQWZZ2Cb4Cafy5fspmv3OrZFgvz/levL3EXRcOVdyRBo8FfaYo3bXSC6MWrF1QNWBf0dXPx6l7uenQfzx8e5oo6JB/v66ezeX6yayCI5fLIC8dZ3BnjFTWcRuC/y7d/ZJK+nkTJ39O6coYmMhXBvyCnpLKMTFbKFIs64hWjHKdq6lbOlrULiISEv//PHYWs+8e7Brh07YKKi001/IK/X4C3xysloWLrh2rze8EzxL1Jn7+9ePgH/1LZp384xdKueNU9O35jKB3NPyi3j/O+3uw/XZb5V1vXGbnDVzlzWNqd4L+/9QLfDTRe3nX5GvafnCCXN3Xp/ZYt6xbS15PgCz95uaGWF35kcnl+sus4rztvac0NfFAM/rboa4xhV/8oK8p2y/a0Vw/+BTllMuNk/mX2zcWdsYpRjkNTtHMuZ0VvG59658U8sf8Ut35tGwdPTbDj6Ejdkg94Brp4g3+ZndPP7ZPwWD3tRqmCXFQleHbEIk37/G1twtfqWbbh7NhIsrBpz4/i9K/iBSOVzQXS1wfwbdVQ7vbxfV44pAPcldnPB645a8qA/raL+uiMO3r4pWsqXUHVCIeE295yAc8dHuabTxyc+gk1eHL/ICPJLNdNofeDU6xNREOFzP+JfYO8PDDOWzf3lZxns/ThKrKP81imquZ/ajxdojcPTmQayvwB3n7RCv7nb7yCn710gnf+r0cBeG0dFk9LPbJPwuP2SWVyiBSz5oRnY5W9A/DL/CGYUY7lHVK9+Mk+y2sFf5820IHKPrZVg+f1y2UfP6Ka+StnCu2xCH/4mrP49UtW1rSY+vH2V/RxxfqFfOIHLzTV5/+HLxx32m2cM3VgLIxzdDP/O3+5j562KDdeXNqqopbsY4P90ESm0A/Jy6KOOLm8KSm0OrJP/Zm/5be2rObv3nEhR4adIeAbp2jp4MXuKfDehRSye4+dE5yLgm3cZuWvRNQJVLm8qbholNMRb36U41hB9qlW8C1eXI6NlPZi8jsfyjP/4GSfqG/mb6aUfWI+bSGCZqY2eSnzEG9PoUYQEf7uxgt562d/zj89tIuP3bR5Wq/zyM5jXHnWorqdNMu6Ha9//3CSB7b38/5Xr6/IaNtjYaJhYWjSX/MHp+sqUCH72F2+J8aKOn89vfyrccur1tHT5nQbbaTY3xYL09seLalvJDM5QlLMXGPhECEpDf6WuKenf3k9oJz2WJiJJjX/sZRzsfSTfWIet89EOstoMltb9olUDqMJdJOXz27dujJ/n/0BQaOZvzInOH95N++5ci13bz1Q1/jCcvadGOflgXFef/7Uko9leY+T+X9j637yxvDuK9ZWnCMi9LTFODacdCZ1lcg+TnA6NOi0fyjP/G1QOnDKsSPawNqo7OPlpktWcsM0LL7OjubJws92eLu9iIiI09Y57bR38Lq1CnN8M/mC28fP6gnuEPegNP9qBV83kBetuf5OHyhaWUtkn0y+ZZq/Maaugm8som4fRSnwZ288l4XtMf7yW88W+vHXyw9fcNodNBT8uxMcG07xjccPcN35S0sasHnpbY+y322f7Kf527765Zr/xat7aYuG+dELjpPJau7TkX2aZUVvW0k7C78WDbaBW7Ksd09Jx8+yDWDlBDHKcWrN35k7YCW7ZV0zJ/uUZ/7W9RML174zU5+/onjoaYvy8d94BYcGJ3jLZ3/Gh+95mv0nx6d8Xiqb47vPHuHspZ2+/fursaw7QTqX58RYmvdeta7qeb1tUfafrAz+nbEIIp7g31YarBLRMNeeu5iHdx7DGMOg29qhnr4+QbO8J1FX8J9M531kn6J0Ut4TqJyOePND3Auaf5XGbnkD2bwpOLWW1aX5t0b2KR/Gbr9OJfuo5q8oZbxx4zJ+9pHX8YWf7OGrv9zLd589ypa1C7j67MVcffYiXrGqt+Q/1o4jI/zZN59h17FR/q+3NzYvyBYKz1rcwavPrr5hqrc9yon9jl/fG/xDIaE7ES3IPn6+++suWMYPth9j+5GRQkbbiNUzKFb0JDg1nnbm77rzessDeFvMOT5Z1rK5kPl7+v5Uy/zbY80XfEfdJnm+G8k8oxztPIa63D6e3kRBb/Ky64Fi8K9H9hma1OCvKCX0tsf46JvP5/euXsdXf7mPn740wKcefpFPPuQUGi9e3cuWdQsQ4PM/eZne9hhf+d1X8roGJB+gsGP5PVetrbkvoMczdat8WE93W4TDVWQfcGQoEWeAznnLOwt/v9ONbVPdP5xk3eKOigAPtoGbE+C9tY2C5p/NFwqniSrBrSMWZiKTI583U+61qIZt6uZX1PZaN4+NJOmKR2q6y8p9/pmcM/ApqN4+0XCp5m8vAlMXfIVM9szs568oTbO0O8FHbjifj9xwPoPjaR7dc5LH955i2/5T/OuPdpM38JbNy/nYTZurzluoxeaVPXzlfa/kmhpZP5Rq9BXBPxHloHGCv18jssWdcS5ds4C
Hdx4rtPSdEc3fvcs56gb/ZCZfEfztHN9kJsey7mIR1QZce2GIhUNEqgQ3O8Q9mc1VjGGsl9GUf0dPZy3FYH5sJFlT8ik9P194nvd4s5Rn/um6M/9wy33+GvyVM4IFHTHesrmPt7ibsMZTWfpHkpy1uGPaPY5EhNedN/XdglemKQ/c3ouBX/AHuO6CpXzigV3scucMzJTmD8W21eXZPThSzlgqW6H5l+wBSNfeHesd5Tjd4D9WpaMneDL5TJ7+kWTJRcr3/LJNXsX5vQFv8nILvfYiUM8O3zO2q6eitJKOeIQNSzpPS3M7b8Av7zRppZ6OWLhqNmzHSX7nV0eIRUJVbZKtxLaptkXfZCZHW9k6Em4Pn8l0aT3AG0BTWf8RjhY7d7eZFg+jVXr5l6/l2HDt1g7gvViUtqQOzO1Tofk7F4EpC74R0R2+ijLb6XEzdW87Z4t1+Pi1H7Y4LqR2To2nWdBAR88gaYuFWdAeLcn8/WQfK+0kqhV80/4jHC1BjHL02y1tsRn7ZCbnjNycKviXyz6FVtbByj7lbp/ZsMNXg7+iNImVfcr1fihm/uU2Ty8iwnXnO9n/TEg+luU9bYWNUZPpSqun3eSVrGL1TLlN3xI1suYgRjk683v9L6Y2Yz8yNEk2b2q2doCi/NIq2ccObUmXvX5Uff6KMvexso9f8PfO863FGzYurfoap4sVPQmODLnB39fnH2IslSWTM75Wz1Q254xwrCPzb8buWVP2cTN2u+9iKtlHREr6AbVM9mkw849q5q8osx87zcs383ePVZMpLK9ct5DuRKTQ72cmsO0sAF+ffyIWLsg1fu0dbLvn8lqBl84ARjmOJjNTyj62ZcZUso99jvX5F2SfFvX2qbvgGwmRzZuSjq9Bo24fRWmSnhqZv5V7yp0z5UTDIb743i3TsqQGxYreNk6Npxn3ye6htFlbwqfgm3RtoItq/B3sKMfpav7pbJ5UNk/XFAXfA6fqy/zB8fRXZP4zrPl79wckQsHchZSjwV9RmqQrHiEktTX/qTJ/gCuqDBk/Xdgsea8797Zyk1fxZ+9j0bAzcNz2/ak1orMwxH2awX+8RmsHKGbs+09OEBJnYM5UlGT+Acs+0SqZ/1Run7hHLqpn5Ol0UNlHUZokFBJu2LScqzZUBu96Nf/ZQF+vE/z3uMG/XLtvqxL8RcQZ6JKp7PtTTnGI+/Rkn7EaIxyhmLEfGZpkSVe8qr225DmeDppBb/KKhAQRzw7fenv72DuGFur+mvkrSgB87ncu8z1u5Z6pZJ/ZgPX67x3wz/xLgn+sNHjFIyG3q2ftgq8d4j5dn78dfFPNOmsz9rypT++3zynIPplgM38RcYq3uVKf/1QXl/K2EK1AM39FaSGLO+NEw0LfFJbD2YBd454TY0Bl8Pfq4L7tnt2unrWsniJCRxND3Mfc5ndTFXyhPr0f3Lm/5VbPADfaxT3OnXplH78hMEGjmb+itJCFHTEe+rPXFJrEzWYSUWejl9X8y3caV5N97HOdxm65iruCcpoZ5TiV7BObTvAv0fyDlX3AGeXYcMG3rFDcCjTzV5QWs25xR13a82ygr6eNPdVkn1jY93twguVYMkM2X+kSKqc9Fp621bNWL39wNHa7yXqqDV6WEtkn4IIvlO7WTde7yats81kraOo3UkT+HxF5QUSeFZH/V0R6PY/dJiK7RWSXiFzvOX6ZiDznPvZZmYm97Iqi+NLXkyh6+ct9/jUy/3g0zJA7iWwqd0pHfPqjHEemkH2cTVvO+zeU+btB1g6jmSozb4RoRIqN3eos+MYLmX/rfP7N/g0fAjYZY14BvAjcBiAiG4GbgQuBG4DPiYj9jfg8cCtwjvvnhibXoChKQFjHD0xR8C2XfSIhhibqC/7NDHEvaP7x6gV0q9fXXfB1JStwMu1oWCp6NDWDN/MvyD51DHCH1mr+TQV/Y8yDxhh7CX8MWOV+fyNwjzEmZYzZC+wGLheRPqDbGPOoMcYAdwE3NbMGRVGCwzp+oLbP3++uYMgdQzmV7NPTFi2MrGyUsZQzxatW51ObNdca3F5+fqGrZya4+b0Wb2/+dDbvSFNTXFzKu4G2giCFyN8Dvu9+vxI46HnskHtspft9+XFfRORWEdkmItsGBgYCXKqiKH54XUkV7R08AbdC9omECgPoa3X1dN6jjSNDkzj5X2PUmuJVXMv0ZZ9UNhd4S+1YWEoy/3okJVsTmNGCr4g8LCLP+/y50XPOXwNZ4G57yOelTI3jvhhj7jDGbDHGbFmyZMlUS1UUpUm8RVK/rp7gFFXLNetENIxtQzNV8FzZ28Z4OlfQ7xthtMYgF0s8EqI9Fq7qCConUSb7BJ/5F90+6Wx+Sr3fPseup1VM+ekYY95Q63ERuQV4G3CdKV7KDwGrPaetAo64x1f5HFcUZRawoobsYzN6P1knUWMPQMV79DrvcWRosuEuprVGOFri0RDLuxN1z0Uo7+oZpM0TSjt0pnOmruAfn+1WTxG5Afg/gHcYYyY8D30HuFlE4iKyHqew+7gx5igwKiJXui6f9wL3N7MGRVGCw2b+Iam0I9rNW347eL3Z8lSa/8oFxeDfKLVGOFoWdcRZv7ij7teMR8JkcoZc3pDK5AJ1+oCTxXtbOtdzcTkdBd9mN3n9CxAHHnKvso8ZY/7QGLNdRO4FduDIQR8yxtjy/geBrwJtODWC71e8qqIoM0IiGmZhR4xUJleROYdCQiwSCiDzdy4wh6cR/EdTGZZ21dby/+m3LvLVl6th3UG2Y2g84EZq0TKf/1Qef6icA9AKmgr+xpizazx2O3C7z/FtwKZm3ldRlNbR15PgmNvXv5y2aLhK8K8/81/cEScWDk0r+I8ls5y1uHbYWtxZn8vHUhzlmCOVzQUu+5Rn/vXcWRQHv89S2UdRlDOPvp5E1ew9EQ35yj4lwX8Kt08oJPT1FqeGNYIzwjHYrjSlA+iD1/xj4cYLvtHTYPXU3j6KopTwe69eXzUwO5l/ZfDyBsxajd0sK1y7Z6OMJrNVB7lMF+8M4lQmz6KOgN0+JQXfxjL/WSv7KIpy5vGqDYurPtYWi/jKOvGSDWBTB7cVvW388uUTDa2rMMUr6Mw/Wib7BOzzL2nvUK/Vcw4UfBVFmUf85fXn0unTWiHhZrMhmbp1AcDKXqeukMnVFwxh6o6e06X1sk+4ZJOXHWJfi1BIiIREg7+iKLOD15+/zPe4zfzbouG6/PUrF7SRN3BsJMmqBe11vbft69MZ8FS00oJvazZ5eYe51Gsl9W4OawVa8FUUpWls5l/vvNniRq/6i76jKad9RPCZv1fzb4Hbx23vYIyp2+oJpRbRVqDBX1GUprFBv/HgX3/Rd9TN/LsD1/zLZJ+ge/u4F5Ns3rhWz/o+I+8dQyvQ4K8oStPYbHkqm6fFtpFoxOtflH1ak/knM62Rfby7dVMNZP6OS2j29vNXFEXxZP71hZS2mLOTuKHg37KCr7PmUff1W7HJC5xibyaXr6sgbp+nmb+iKLOahKfgWy8rehONyT4pO8Ur4IKvu+YRtyV1Kxq7gZP517vDF9zNYar5K4
oym7EZf72aPzS+0Ws06QTnwH3+bjC2LaaD7u3jbc9cr88fnP0BmvkrijKrsTp5Q8G/t43Dg/UPdRlLZomEJPDMvBD8W5T5e/v0NGT1VLePoiizHZv5NyL7rFrQ2FAX29en3j799WIvXCPJFgV/b+bfwKa2aFg1f0VRZjnT0/wbs3vW08t/OkTDggiMTNqCb2vcPhNpp6t9rF63T0Qzf0VRZjlW2qjX6gmNB/+RZNa3tUSziDhSUiHzb5HPf9wtWNcr+8R1h6+iKLOdUEhY2hVnaXf9vfTtUJe6M/9UJvCOnpZ4JNxCt4+T6dvg35Dso719FEWZ7Xzvw9c0JMvYoS6H6g7+2SmneE2XeCRU2EEctOxjLyZjDWb+6vNXFGVOsLgz3lDgbHSoy2iLNH9wpJ7WuX2cz6TRzF99/oqinLE04vUfS2YD391riUfChU1k9e5SrpdoxJV9CgXfen3+mvkrinKGsnJB/cF/tAUjHC3ebD/wls7haco+4RApzfwVRTkTWdHbVhjqUosjQ5Oks3l622ItWUdp8G9Ne4eGZR91+yiKcqaysjdB3sChwdrZ/z//8CWiYeHtF/W1ZB3ebL9VBd/xlCv76A5fRVHmO5tX9iICN/3rL/jcj3cXsmMvewbGuHfbIX7nirV1T/1qFK+3P/AZvhWZf/2bvPIGcvnWtHXW4K8oyoyxcUU3//lHr+aytQv4xAO7uPYTP+LbTx8uOedTD79ELBziQ687u2Xr8Eo99RZk6yVWZvWsV1aKtniIeyB/SxH5CxExIrLYc+w2EdktIrtE5HrP8ctE5Dn3sc9K0I06FEWZU2xa2cO//e4r+Y8Pvor1izv4028+w2cfeQljDDuOjPCfvzrC7716HUu66t9A1ihW6omFQ4RCwYakaFnBtxHNH1oX/JsunYvIauCNwAHPsY3AzcCFwArgYRE51xiTAz4P3Ao8BnwPuAH4frPrUBRlbnPZ2gV84/ev5KP3PcsnH3qRI0OTHBtJ0p2IcOs1G1r63jYbD7rYC9Pf4Wt7ALXK7hmEb+pTwEeA+z3HbgTuMcakgL0ishu4XET2Ad3GmEcBROQu4CY0+CuKgpPt/tNvXsSKnjb+5Ue7AfjIDefR0x58Tx8vVucPWu8Hp3dQLBwqNnZrYIcvzNLgLyLvAA4bY35Vpt6sxMnsLYfcYxn3+/Lj1V7/Vpy7BNasWdPMUhVFmSOICH9xJAB/dQAAB/dJREFU/XmsWtDGD7b387uvWtfy97SyT9BOH0ssEir6/BuUfVq1y3fK4C8iDwPLfR76a+CvgDf5Pc3nmKlx3BdjzB3AHQBbtmxp3SRjRVFmHTdfvoabLz89SV8rZR9wAvmw2z6i3sy/UPCdqczfGPMGv+MishlYD9isfxXwlIhcjpPRr/acvgo44h5f5XNcURRlxigUfFsU/KNhKVg2G+ntA7PQ7WOMec4Ys9QYs84Ysw4nsF9qjOkHvgPcLCJxEVkPnAM8bow5CoyKyJWuy+e9lNYKFEVRTjtFzb91so+lXp9/dDZr/tUwxmwXkXuBHUAW+JDr9AH4IPBVoA2n0KvFXkVRZpRWyz7ebL/uYS4tzvwDC/5u9u/9+Xbgdp/ztgGbgnpfRVGUZikWfFuk+XuCfzTUYMG3RZm/7vBVFGXeU8z8Wyv7RMNS9yayObHDV1EUZS7TSp8/FDP/eou90Podvhr8FUWZ97Ra9ok2E/xV9lEURWkNp0v2acRKOmutnoqiKGcKp8vt00jHUM38FUVRWoz197dK849PI/O3F4xWtXfQ4K8oyrzndLp9Gn2OZv6KoigtovWyjxP0p6P5Z3I6yUtRFKUlFGSfFjZ2g8bcPvaCkVLZR1EUpTUsaI/S0xZl/eKOlrz+dKyedg7ArG/voCiKMldpj0V45m/f2LLXj01TVoqGpWXtHTT4K4qi4GTarWI6O3zBuWioz19RFGWOEpuGzx+c4K+N3RRFUeYotjd/tGHZRzN/RVGUOUtR9mlMWopFQqQ081cURZmbRKdZ8I2FQ7rDV1EUZa4Sb6bgq5m/oijK3CQacXf4Nhr8w1rwVRRFmbPEws4OYi34KoqizCOm097BPk+Dv6IoyhzFunwa3+EbIq2N3RRFUeYm02npDM7FIp3NtWJJGvwVRVFaTTM7fNXtoyiKMkeJTXuHr5DJquyjKIoyJ5nODF+Y5Zm/iPyxiOwSke0i8gnP8dtEZLf72PWe45eJyHPuY5+VVrbSUxRFmQX0tkcRgYUdsYaeFwuHW7bDt6mWziLyOuBG4BXGmJSILHWPbwRuBi4EVgAPi8i5xpgc8HngVuAx4HvADcD3m1mHoijKbKavp40H//RaNizpbOh50YjM2t4+HwQ+boxJARhjjrvHbwTuMcakjDF7gd3A5SLSB3QbYx41xhjgLuCmJtegKIoy6zlnWRehUINuH3eTlxMug6XZ4H8ucI2IbBWRn4jIK93jK4GDnvMOucdWut+XH/dFRG4VkW0ism1gYKDJpSqKoswtbK0gmw8++E8p+4jIw8Byn4f+2n3+AuBK4JXAvSJyFuB3eTM1jvtijLkDuANgy5YtrSl5K4qizFLOXd7FW1/RR74Fmf+Uwd8Y84Zqj4nIB4H7XAnncRHJA4txMvrVnlNXAUfc46t8jiuKoihlXH/hcq6/0C/3bp5mZZ9vA68HEJFzgRhwAvgOcLOIxEVkPXAO8Lgx5igwKiJXui6f9wL3N7kGRVEUpUGaHeD+b8C/icjzQBq4xb0L2C4i9wI7gCzwIdfpA06R+KtAG47LR50+iqIopxlpRRW5FWzZssVs27ZtppehKIoypxCRJ40xW8qP6w5fRVGUeYgGf0VRlHmIBn9FUZR5iAZ/RVGUeYgGf0VRlHnInHH7iMgAsH+m19Eki3H2QShF9DMpRT+PUvTzqKTRz2StMWZJ+cE5E/zPBERkm5/laj6jn0kp+nmUop9HJUF9Jir7KIqizEM0+CuKosxDNPifXu6Y6QXMQvQzKUU/j1L086gkkM9ENX9FUZR5iGb+iqIo8xAN/oqiKPMQDf6nARFZLSI/EpGdIrJdRD4802uaDYhIWESeFpHvzvRaZhoR6RWRb4nIC+7vyVUzvaaZRkT+zP3/8ryI/LuIJGZ6TacTEfk3ETnutsy3xxaKyEMi8pL7dcF0X1+D/+khC/w3Y8wFOCMvPyQiG2d4TbOBDwM7Z3oRs4TPAA8YY84HLmKefy4ishL4E2CLMWYTEAZuntlVnXa+CtxQduyjwCPGmHOAR9yfp4UG/9OAMeaoMeYp9/tRnP/YVQfXzwdEZBXwVuBLM72WmUZEuoFrgS8DGGPSxpihmV3VrCACtIlIBGhnno18Ncb8FDhVdvhG4E73+zuBm6b7+hr8TzMisg64BNg6syuZcT4NfATIz/RCZgFnAQPAV1wZ7Esi0jHTi5pJjDGHgX8EDgBHgWFjzIMzu6pZwTJ3HC7u16XTfSEN/qcREekE/gP4U2PMyEyvZ6YQkbcBx40xT870WmYJEeBS4PPGmEuAcZq4nT8TcLXsG4H1wAqgQ0TePbOrOrPQ4H+aEJEoT
uC/2xhz30yvZ4a5GniHiOwD7gFeLyJfn9klzSiHgEPGGHs3+C2ci8F85g3AXmPMgDEmA9wHvGqG1zQbOCYifQDu1+PTfSEN/qcBEREcPXenMeaTM72emcYYc5sxZpUxZh1OEe+Hxph5m9UZY/qBgyJynnvoOmDHDC5pNnAAuFJE2t3/P9cxz4vgLt8BbnG/vwW4f7ovFAlkOcpUXA28B3hORJ5xj/2VMeZ7M7gmZXbxx8DdIhID9gDvm+H1zCjGmK0i8i3gKRy33NPMs1YPIvLvwGuBxSJyCPg/gY8D94rI+3EukL857dfX9g6KoijzD5V9FEVR5iEa/BVFUeYhGvwVRVHmIRr8FUVR5iEa/BVFUeYhGvwVRVHmIRr8FUVR5iH/P9Gg6Y2UZHlXAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "pl.plot(x.detach(), x.grad.detach())" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Concepts not covered in this lecture\n", + "\n", + "PyTorch's `autograd` is a very powerfull tool. For instance, it can calculate the Jacobian and Hessian of any given function! Here is a list of more advanced things that you can accomplish with `autograd`:\n", + "\n", + "- Vector-Jacobian products for non-scalar outputs (e.g. when `y` is a vector)\n", + "- Compute Jacobian and Hessian\n", + "- Retain the computation graph (useful for inspecting gradients inside a model)\n", + "- Sparse gradients\n", + "- Register and remove hooks (useful for saving gradients)\n", + "- How to set up user-designed `Function`s properly\n", + "- Numerical gradient checking\n", + "\n", + "\n", + "More info: pytorch.org/docs/stable/autograd.html" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### The interaction of `autograd` with `nn.Module`s and `nn.Parameters`" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the next notebook we will see how to build a linear regression model using PyTorch's `nn.Module`. You will see that you don't need to worry about gradients when using `nn.Module` and `nn.Parameter`. This is because they automatically keep track of gradients for you." + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 181, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "Parameter containing:\n", + "tensor([[-0.2067, -0.4198]], requires_grad=True)" + ] + }, + "execution_count": 181, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "x.grad" + "# w.x + b\n", + "lin = torch.nn.Linear(2, 1, bias=True) # nn.Linear is a nn.Module\n", + "lin.weight # lin.weight is a nn.Parameter!" + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.nn.parameter.Parameter" + ] + }, + "execution_count": 125, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(lin.weight)" ] }, { @@ -965,7 +2036,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/02-linear-regression.ipynb b/02-linear-regression.ipynb index 718ce09..6c3dcd4 100644 --- a/02-linear-regression.ipynb +++ b/02-linear-regression.ipynb @@ -7,9 +7,19 @@ "# Linear Regression and Gradient Descent" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this notebook we will see how we can perform linear regression in three different ways: \n", + "1. pure numpy\n", + "2. numpy + pytorch's autograd \n", + "3. 
pure pytorch" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -18,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -33,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -44,6 +54,27 @@ "from IPython.core.debugger import set_trace" ] }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.random.seed(0)\n", + "torch.manual_seed(0)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -53,9 +84,29 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 99, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(100, 1) (100,)\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD4CAYAAAAEhuazAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAWcklEQVR4nO3de4xcZ3nH8d8zu3ZE2lQsdsDG60usXEQc9eIdmY3yD5QUnCqqISHFENFUxLiqjNq0/QPSSG4FioRoaWlVQ2XcCCo5SdM0yBG3XJCrSBUbvBMhaieYbpdsvLVFnM1SkIKy3p2nf+xMOh7P/cyZ9z1zvh8JvHvmzOyrEfzOe57znPeYuwsAkC+F0AMAAAwe4Q8AOUT4A0AOEf4AkEOEPwDk0GjoAXRq/fr1vm3bttDDAIBMKZVKr7j7lfXbMxP+27Zt0/T0dOhhAECmmNlco+2UfQAghwh/AMghwh8AcojwB4AcIvwBIIcIfwDIIcIfAHpQmlvUoeMzKs0thh5KTzLT5w8AsSjNLerOI1NaWi5r7WhBR/dNamLrWOhhdYWZPwB0aWp2QUvLZZVdurBc1tTsQughdY3wB4AuTW5fp7WjBY2YtGa0oMnt60IPqWuUfQCgSxNbx3R036SmZhc0uX1d5ko+EuEPAD2Z2DqWydCvouwDADlE+ANADhH+AJBDhD8A5BDhDwA5RPgDQA71JfzN7AEze9nMTtZse4uZPWVm/1X5d6zmtXvNbMbMTpvZ+/oxBgBA5/o18/+KpN112z4l6Tvufo2k71R+l5ldL2mvpB2V93zRzEb6NA4AQAf6Ev7u/oykV+s275H01crPX5X0/prtD7v76+7+Y0kzknb1YxwAgM6kWfN/m7ufk6TKv2+tbN8k6UzNfvOVbQCAAQlxwdcabPOGO5rtN7NpM5s+f/58ysMCgPxIM/x/YmYbJany78uV7fOSNtfsNy7pbKMPcPfD7l509+KVV16Z4lABIF/SDP/HJd1V+fkuScdqtu81s8vM7CpJ10j6XorjAADU6cuqnmb2kKR3SVpvZvOS/kLSZyU9YmZ3S3pJ0h2S5O6nzOwRSc9LWpZ0wN1X+jEOAEBn+hL+7v7hJi+9p8n+90u6vx9/GwDQPe7wBYAcIvwBoA9Kc4s6dHxGpbnF0EPpCE/yAoCESnOLuvPIlJaWy1o7WtDRfZPRP+WLmT8AJDQ1u6Cl5bLKLl1YLmtqdiH0kNoi/AEgocnt67R2tKARk9aMFjS5fV3oIbVF2QcAEprYOqaj+yY1NbugscvXvjHzj7n0Q/gDQAOluUVNzS5ocvu6jkK8uk9Wav+EPwDU6fUCbqPaf6zhT80fAOr0egE3S7V/Zv4AUKca4heWy12FeG3tv9NyUSjm3nA15egUi0Wfnp4OPQwAOdFtzT/WMZhZyd2L9duZ+QNAAxNbx4LO3NO+cYyaPwBEKO0bxwh/AIhQ2hePKfsAQITSvnhM+ANApNK87kDZBwByiPAHgBwi/AEEl7UHoQwDav4Agsrig1BiuAEsKcIfQFBZWgxNyubBqhHKPgCC6rafPXSJKItP7WqEmT+AoLrpZ49h1t3rom+xIfwBBNeun71aYz/7018ELxFlaeXOVgh/AFGrne2PFkyjIwWtrISddYde9K0fCH8AA9dNt0xtjX2l7PrQrs3a9OY3RTXrzmL3D+EPYKC6rdvX19hv3zkeVcDGcB2iF4Q/gIHqtrUz9hp71lpVqwh/AAPVS7dMzDX2rHb/8BhHAANXXyPvtGbebL/QNffQf78VHuMIIBq1M/lOa+bN9ouh5h7zmUkz3OELIKhO75httt+w3HE7aKnP/M3sRUk/l7Qiadndi2b2Fkn/ImmbpBcl/a67s5wfkEOd1syb7ZfVmntoqdf8K+FfdPdXarZ9TtKr7v5ZM/uUpDF3/2Srz6HmDwyvrNf8Y9as5h8q/E9Lepe7nzOzjZL+3d2va/U5hD8AdK9Z+A+i5u+SnjSzkpntr2x7m7ufk6TKv29t9EYz229m02Y2ff78+QEMFQDyYRDdPje5+1kze6ukp8zsh52+0d0PSzosrc780xogAPRTFspQqYe/u5+t/PuymX1N0i5JPzGzjTVln5fTHgcADEIMraedSLXsY2a/ZGZXVH+W9F5JJyU9Lumuym53STqW5jgAYFCy0nqa9sz/bZK+ZmbVv/Wgu3/bzE5IesTM7pb0kqQ7Uh4HgA5koVwRu6y0nqYa/u4+K+nXGmxfkPSeNP82gO4MolyRh4NL7AvRVbG8A5BjtWGc9uqUWamF90MWlnsg/IGcqg/jg7fuSLVckdWlj4cV4Q/kVH0YL762lGq5Iiu18Lwg/IGcahTGaZYrWtXC83AtIDaEP5BTtWE8dvnaN1oS+xG+zcK80cElT9cCYkL4AwlkfcZaHXM/w7fbMOdaQBis5w/0qBpyn3/ytO48MqXSX
DZXJX/suXm9fqF/NyV1e5NTtfw0YuJawAAx8wd6NAwz1tLcov51+oyqC2eNjCQP324v7GalL37YEP5Aj4ahe2VqdkHL5dXoN0nv2HBF4s/sJcyz0Bc/bHiAO5BA1mv+tfX5sq8eAC5bw0XXYRJyPX9gaE1sHdOBd1+d2aCsztJvunq9TKsP34h5MTL0D+EP5NzE1jHdc/O1umwNF13zhJo/kHPV0tXv37hNp879TLfcsDGzZzLoHOEP5Fijmv+JF1/VdRuu4AAw5Cj7ADlW264qZavmX5pb1KHjM5m9vyI0Zv5AhiXtNqq2q1YPAAUlq/kPqvuJJSGSI/yBjOpHANav77P42lLPwT3IQB6GG+xCI/yBjKoNwKULZX3h6R/pnpuv7ekA0I/gTCuQG51NDMMNdqER/kBGjV2+VgUzubvKkv5j5hWdePHV1EsgzUo7aQRys7MJloRIjvAHMqg0t6hPf/2Uyu6yyt1ZgyiBtCrtpBHIrc4mWBIiGcIfSEHaFz5rQ7Fg0khh9Qwg7RJIu9JOvwOZ8k56CH+gz5rNjpMeEGrfXx+KB2/dkehibacGHcbtziayvrZSSIQ/0GfN1rNP0gnT6IASquZ9+85xeeXfQfzdZmcTtHsmQ/gDfdZodtxNJ0yj2Wyj93e7oFw/zjxqw/b2neNdf0Y/0e6ZDOEP9FmzUkUn5ZJms9mk5ZZ+zJJjC1uuByRD+AMpqC9V1B8QJOnQ8ZlLZuHNAjZpJ00/gju2sKXdMxnCHxiQaoi3moW3CtheOmmqpZ6xy9c2/NxuSkExhi3tnr0j/IEBa3ZBuBqqnZwhdKL+IFPfEdRLKYiwHR6EPzBg9bP7scvXXhLCB959deI6ff1BZvG1JR1499VNXw9dw8dgsaQzkEAvywpXyyd/+t7rdHTfpBZfW2p6JtBoe6eqB5lmT+dq9zqGGzN/5F6vLZBJZub15ZNG9fikF1jb1ehjrOFjcAh/5FqSAG9Xu+/mQNAohPsRzu1q9NTw8ytY+JvZbkl/J2lE0hF3/2yosSC/ktS9O6nd93om0G57rW7OXFgOAVVBwt/MRiQdkvRbkuYlnTCzx939+RDjQX4lKa3Uz8xDXEDt5syl3b4cGPIl1Mx/l6QZd5+VJDN7WNIeSYQ/BippaaWT2n2aujngtNqXdXLyJ1T4b5J0pub3eUnvrN/JzPZL2i9JW7ZsGczIkDv9qnuHuIDazZlLq31p+8yfUOFvDbb5JRvcD0s6LEnFYvGS14HYDPoCajcHnFb7xrZ0A9IXKvznJW2u+X1c0tlAYwFSl2Y9vZsDTqsLy7R95kuo8D8h6Rozu0rS/0jaK+kjgcYCpCrGenqjgxFtn/kSJPzdfdnMPiHpCa22ej7g7qdCjAVI29Tsgl6/UJZLWroQvp4e48EIgxesz9/dvynpm6H+PoZH7C2KY5evfeOCVrnye0hc3IXEHb7IuCzMYhdfW1LBpLKvdjqcPPu/Xb2/3wc3Lu5CIvyRcVmYxU5uX6fRgmlpxeWSHi3NX/T821bh3uvBrdVn9uvibuxnXGiN8EemZWEWO7F1THcUN+vBZ1+SS1pZ+f+DVLtw7+Xg1skBI+nF3SyccaE1lnRGptUvjxxrAN22c1yXrbl0+eR2yzb3suxy0qWgOzGIv4F0MfNH5g2iRTFpiaNZqaXdmUsvJZpBnA1l4YwLrZl7Nm6cLRaLPj09HXoYyKFeShyhV9qsfXZv7aMb+4mafzaYWcndi/XbmfkDbXRbd+/2YJHGmUv189Ksy3NTWLZR8wfa6LbuHks9PJZxIE7M/IEmassaB2/doW+dPKdbbtjYdrYbSz08lnEgToQ/0MCDz76kg8dOaqXsKphkBVO57Drx4qu6bsMVPa+eOUixjANxIvyBOqW5RR08dlLL5dVmiBWv/lfnvfax1MNjGQfiQ80fqFGaW9QXnv7RG8Ffy9R5r/2glOYWdej4jEpzi6GHgoxh5g9U1Hbp1KqG/gcnxi9aliE07rJFEoQ/ohBDz3htd0zBpJuuXq9bbtj4Rp98dR9JUYRsFtY1QrwIfwQXywy2vjvmnpuvjfoB53TzIAnCH8HFMoNt1R3Ty41eaZ/J0M2DJAh/BBfTDLZZd0w3Y6xtE71sTbpnCXTzoFeEP4LLwgy20zHWt4nG8NhGoBHCH1GoncHGcPG3kU5m2VOzCyrXLJZYKBi1eESJ8EdqegnxGC+sdqNaHlpaLqtgpk/vuSFT40d+EP5IRa8hnvbF37TPKrJQwgIkwh8p6TXE07z4O6izCi7CIgsIf6Si1xBPc+YcS0spEAPCH6lIEuJpzJxLc4s6+9NfaLRgWil7ywNSrBecgX4i/JGaWMofteWe0ZGCPrRrc9M1elqVhjgoYJgQ/hh6teWelZWyNr35TU3Du1lpqDS3qA9/eeqNMtZDH89WFxJQjyWdkUgWlhTu5jGMzfZ97Ll5LS2X5ZKWlst67Ln5AY0eSAczf/QsaffMoMoo7a4/1I+j0b71q/tfuto/kC2EP3qWpHtm0DdzNbv+0Gwc9fvevnNcj06f0YUV15oR0+07x1MbKzAIhD96lqQnP5a2y07HMbF1TA/tv5ELvhgahD96lqSdM5aVPLsZRyzdS0A/mHs61Usz+0tJH5d0vrLpz939m5XX7pV0t6QVSX/k7k+0+7xisejT09OpjBVhDKrm3+7v0MKJYWZmJXcv1m9Pe+b/t+7+13UDuV7SXkk7JL1d0tNmdq27r6Q8FkSml5l0t0HdrKZf/zmEPvImRNlnj6SH3f11ST82sxlJuyR9N8BYkCG9XCRuVNOXNPCVQzm7QGzS7vP/hJn9wMweMLPq/+I3STpTs898ZRvQUrMgb6VR334vn5NE9aD1+SdP684jU1HfE4H8SDTzN7OnJW1o8NJ9kr4k6TNabYn+jKTPS/qYJGuwf8MLD2a2X9J+SdqyZUuSoWII9HKRuNlF6UFebI6lswmolSj83f3mTvYzsy9L+nrl13lJm2teHpd0tsnnH5Z0WFq94Nv7SDEMeu0uqq/pD3rN/Vg6m4BaaXb7bHT3c5Wf/0TSO919r5ntkPSgVuv8b5f0HUnXtLvgS7cPsoyaP0IJ0e3zOTP7da2WdF6U9AeS5O6nzOwRSc9LWpZ0gE4fDDs6ihCb1MLf3T/a4rX7Jd2f1t8GALTGqp4AkEOEPwDkEOGPoZGFZwsAsWBhtyGR926SQS8RDWQd4T8EshR8aR2kuJEK6A7hPwSyEnxpHqS4kQroDuE/BLISfGkepAZ91y6QdYT/EEgr+GpLNJISf37aBylupAI6l9ryDv3G8g6D9eCzL+ngsZNaKa8+s1ZmWl5JXq7J+4VpYNBCPcwFGVSaW9TBYye1XF6dGCytuEwuV/JyDbNzIA70+eMSU7MLKtecEVbXwq9dEx9AtjHzxyWqtfml5bIKZvr0nht03YYrKNcAQ4TwxyWaXUCmzg8MD8IfDfWrNp+lG9CAPKHmj1QN+nm5ADpD+CNVjR6gDiA8yj5IFXfeAnEi/JE6evuB+FD2AYAcIvzRFg9JAYYPZR+0RKsmMJyY+Q+hfs7U
adUEhhMz/yHT75l6Vp4VAKA7hH9E+rEMQr8fmEKrJjCcCP9I1M7YR0cK+uDEuG7fOS6pu4eopDFTp1UTGD6EfyRqZ+xLy2U99OxLenT6TNcPUWGmDqAThH8kqjP21y+U5dLqg1NWVn/q9iEqzNQBtEO3TySqM/aPvHOL1o7Y6lo4I8ZDVACkgpl/RKoz9tt2jvf1wekAUI/wj1B92YbQB9BvlH0AIIcIfwDIoUThb2Z3mNkpMyubWbHutXvNbMbMTpvZ+2q2T5jZf1Ze+3szsyRjAAB0L+nM/6Sk2yQ9U7vRzK6XtFfSDkm7JX3RzEYqL39J0n5J11T+szvhGAAAXUoU/u7+grufbvDSHkkPu/vr7v5jSTOSdpnZRkm/4u7fdXeX9M+S3p9kDACA7qVV898k6UzN7/OVbZsqP9dvb8jM9pvZtJlNnz9/PpWBAkAetW31NLOnJW1o8NJ97n6s2dsabPMW2xty98OSDktSsVhsuh8AoDttw9/db+7hc+clba75fVzS2cr28QbbAQADlFbZ53FJe83sMjO7SqsXdr/n7uck/dzMJitdPr8nqdnZAwAgJUlbPT9gZvOSbpT0DTN7QpLc/ZSkRyQ9L+nbkg64+0rlbX8o6YhWLwL/t6RvJRkDAKB7ttp0E79isejT09OhhwEAmWJmJXcv1m/nDl8AyCHCHwByiPAHgBwi/AEghwh/AMghwh8AcojwB4AcIvwBIIcIfwDIIcIfAHKI8AeAHCL8ASCHCH8AyCHCHwByaOjDvzS3qEPHZ1SaWww9FACIRtvHOGZZaW5Rdx6Z0tJyWWtHCzq6b1ITW8dCDwsAghvqmf/U7IKWlssqu3Rhuayp2YXQQwKAKAx1+E9uX6e1owWNmLRmtKDJ7etCDwkAojDUZZ+JrWM6um9SU7MLmty+jpIPAFQMdfhLqwcAQh8ALjbUZR8AQGOEPwDkEOEPADlE+ANADhH+AJBDhD8A5JC5e+gxdMTMzkuaCz2OlK2X9EroQUSE7+NifB8X4/u4WLPvY6u7X1m/MTPhnwdmNu3uxdDjiAXfx8X4Pi7G93Gxbr8Pyj4AkEOEPwDkEOEfl8OhBxAZvo+L8X1cjO/jYl19H9T8ASCHmPkDQA4R/gCQQ4R/ZMzsr8zsh2b2AzP7mpm9OfSYQjKzO8zslJmVzSy3bX1mttvMTpvZjJl9KvR4QjKzB8zsZTM7GXosMTCzzWZ23MxeqPx/5Y87eR/hH5+nJN3g7r8q6UeS7g08ntBOSrpN0jOhBxKKmY1IOiTpFknXS/qwmV0fdlRBfUXS7tCDiMiypD9z93dImpR0oJP/fRD+kXH3J919ufLrlKTxkOMJzd1fcPfToccR2C5JM+4+6+5Lkh6WtCfwmIJx92ckvRp6HLFw93Pu/lzl559LekHSpnbvI/zj9jFJ3wo9CAS3SdKZmt/n1cH/uZE/ZrZN0m9IerbdvkP/GMcYmdnTkjY0eOk+dz9W2ec+rZ7OHR3k2ELo5PvIOWuwjR5tXMTMflnSv0m6x91/1m5/wj8Ad7+51etmdpekWyW9x3NwI0a77wOal7S55vdxSWcDjQURMrM1Wg3+o+7+WCfvoewTGTPbLemTkn7H3V8LPR5E4YSka8zsKjNbK2mvpMcDjwmRMDOT9E+SXnD3v+n0fYR/fP5B0hWSnjKz75vZP4YeUEhm9gEzm5d0o6RvmNkTocc0aJUGgE9IekKrF/MecfdTYUcVjpk9JOm7kq4zs3kzuzv0mAK7SdJHJf1mJTO+b2a/3e5NLO8AADnEzB8AcojwB4AcIvwBIIcIfwDIIcIfAHKI8AeAHCL8ASCH/g9tO5cEnBSqEQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], "source": [ "from sklearn.datasets import make_regression\n", "\n", @@ -71,19 +122,270 @@ ")\n", "\n", "fix, ax = plt.subplots()\n", - "ax.plot(X, y, \".\")" + "ax.plot(X, y, \".\")\n", + "print(X.shape, y.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Numpy Solution" + ] + }, + { + "cell_type": "code", + "execution_count": 270, + "metadata": {}, + "outputs": [], + "source": [ + "class LinearRegression(object):\n", + " def __init__(self, n_features, n_targets=1, lr=0.1):\n", + " self.W = np.zeros((n_targets, n_features))\n", + " self.lr = lr\n", + "\n", + " def update_weight(self, X, y):\n", + " m = X.shape[0]\n", + " y_hat = self.predict(X)\n", + " W_grad = 2 * np.dot(X.T, y_hat - y) / m\n", + " self.W = self.W - self.lr * W_grad\n", + "\n", + " def loss(self, y_hat, y):\n", + " return np.mean(np.power(y_hat - y, 2))\n", + "\n", + " def predict(self, X):\n", + " y_hat = np.dot(X, self.W.T)\n", + " return y_hat.squeeze(-1)\n", + "\n", + " def train(self, X, y, epochs=50):\n", + " \"\"\"\n", + " X (n_examples x n_features):\n", + " y (n_examples): gold labels\n", + " \"\"\"\n", + " loss_history = []\n", + " for _ in range(epochs):\n", + " # for x_i, y_i in zip(X, y):\n", + " # self.update_weight(x_i, y_i)\n", + " self.update_weight(X, y)\n", + " y_hat = self.predict(X)\n", + " loss = self.loss(y_hat, y)\n", + " loss_history.append(loss)\n", + " return loss_history" + ] + }, + { + "cell_type": "code", + "execution_count": 240, + "metadata": {}, + "outputs": [], + "source": [ + "use_bias = False\n", + "if use_bias:\n", + " X_np = np.hstack([np.ones((n_samples,1)), X])\n", + " n_features += 1\n", + "else:\n", + " X_np = X" + ] + }, + { + "cell_type": "code", + "execution_count": 267, + "metadata": {}, + "outputs": [], + "source": [ + "model = LinearRegression(n_features=n_features, n_targets=1, lr=0.1)\n", + "loss_history = model.train(X_np, y, epochs=50)\n", + "y_hat = model.predict(X_np)" + ] + }, + { + "cell_type": "code", + "execution_count": 268, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfZRV9X3v8fdnnmFgGGAeQGaEwRAVNGAkhFRvE6NGk+aKK23uxVsrSU1JU1fT29WHq03b5HaFNqs395qkqem1aqMriYaVxEjTJksvebBJjGSMGlAkoCAMTzM8CMPTPH7vH2ePHoYDA3NmODNnf15rnXX2+e29z/5uWHzO5rf3/m1FBGZmlg4lhS7AzMzOH4e+mVmKOPTNzFLEoW9mliIOfTOzFHHom5mliEPfrAhJ+qGkjxS6Dht7HPo2ZkjaJum6QtdhVswc+mYjSFJZoWswOxOHvo0Lkn5P0hZJByStkXRB0i5Jd0tql3RI0i8lXZbMe5+kFyV1Stop6U9P890fkvQTSf+QfMdLkq7Nmj9F0v2Sdiff82lJpYPWvVvSAeBTOb6/RNKdkl6WtF/SaknTknlzJIWklZJ2Jdv4k6x1KyV9Lpm3K5muzJq/TNJzkg4n339j1qZnJ7V1SnpcUl1efwlWFBz6NuZJejfwd8B/AWYCrwKPJLPfA/w68GagFvivwP5k3v3ARyNiMnAZ8P0zbObtwCtAHfBJ4FsDwQw8CPQCbwKuSLb5kRzrNgCrcnz3x4GbgXcCFwAHgX8ctMw1wLzku+/M6ub6BLAUWAQsBJYAf5n8uSwBHgL+LNn3Xwe2ZX3nfwM+nNRVAeT80bOUiQi//BoTLzKBdV2O9vuBv8/6PAnoAeYA7wZ+RSYYSwattx34KFAzxHY/BOwClNW2DvgdoBHoAiZkzbsF+EHWutuH+P6NwLVZn2cm9Zcl+xDAJVnz/x64P5l+GXhf1rwbgG3J9P8F7j7NNn8I/GXW5z8Avlfov2O/Cv/ykb6NBxeQOboHICKOkDmanxUR3we+SObIea+keyXVJIv+JvA+4FVJP5L0jjNsY2dEZI8++Gqy3dlAObBb0muSXiMTtg1Zy+4Yov7ZwKNZ628E+sj8oOT6joFtn7Lvg+Y1k/lROJ09WdPHyPxYWso59G082EUmOAGQVA1MB3YCRMQXIuJKYAGZbp4/S9p/HhHLyAT0t4HVZ9jGLEnK+nxhst0dZI706yKiNnnVRMSCrGWHGqp2B/DerPVrI6IqInZmLdOcY9un7PugeTuAi4bYttlJHPo21pRLqsp6lQFfAz4saVFyEvNvgacjYpukt0l6u6Ry4ChwAuiTVCHptyVNiYge4DCZo+vTaQA+Lqlc0geBS4F/j4jdwOPA/5ZUk5yUvUjSO89hn/4JWCVpNoCkeknLBi3zV5ImSlpAph/+60n7w8BfJuvUAX8NfCWZd3/y53JtUtcsSZecQ12WQg59G2v+HTie9fpURKwF/gr4JrCbzNHt8mT5GuCfyZwcfZVMt89nk3m/A2yTdBj4feDWM2z3aTInUveRORn7WxExcEL4NjInQl9MtvMNMv3yZ+vzwBrgcUmdwM/InPzN9iNgC7AW+GxEPJ60fxpoBX4JrAd+kbQREevI/EDcDRxKvmM2Zmegk7sxzdJH0oeAj0TE1QXY9hxgK1AeEb3ne/uWPj7SNzNLEYe+mVmKuHvHzCxFfKRvZpYiY35wqLq6upgzZ06hyzAzG1eeeeaZfRFRP7h9zIf+nDlzaG1tLXQZZmbjiqRXc7W7e8fMLEUc+mZmKTJk6Et6IBmrfMOg9j+UtEnSC5L+Pqv9rmTc802Sbshqv1LS+mTeFwaNc2JmZufB2RzpfxnIfjADkq4BlgFvSQae+mzSPp/M7fELknXuGXjYBPAlYCWZW93nDf5OMzMbfUOGfkQ8CRwY1Pwx4DMR0ZUs0560LwMeiYiuiNhKZiyRJZJmkhnT/Klk+NqHyDxUwszMzqPh9um/GfhPkp5Oxil/W9I+i5PHBW9L2mYl04Pbc0oeHdcqqbWjo2OYJZqZ2WDDDf0yYCqZpxX9GbA66aPP1U8fZ2jPKSLujYjFEbG4vv6Uy0zNzGyYhhv6bcC3ImMd0E/m2aJtnPwwiCYyD3xoS6YHt4+aB3+6jX99flQ3YWY27gw39L9N5tmkSHozmbHG95EZM3y5pEpJLWRO2K5LHkTRKWlp8j+C24DH8q7+DB75+Q4ee27n0AuamaXIkHfkSnoYeBdQJ6kN+CTwAPBAchlnN7AiOUH7gqTVZB420QvcEREDTyv6GJkrgSYA301eo6axppI9h0+M5ibMzMadIUM/Im45zaycTyGKiFVknjw0uL0VuOycqsvDjJoqXth1+HxtzsxsXCjaO3IbaqrYd6SLnr7+QpdiZjZmFG3oz6ipIgL2HekqdClmZmNG0YZ+Y00lAHsOuV/fzGxAEYd+FQB7D/tI38xsQApC30f6ZmYDijb0p1dXUFYih76ZWZaiDf2SEtEw2dfqm5llK9rQB2icUkW7+/TNzF5X3KE/ucpH+mZmWYo69GdMqXKfvplZlqIO/YaaSjpP9HKsu7fQpZiZjQlFHfozfK2+mdlJijr0B67V9125ZmYZqQh99+ubmWUUeehnxt9x6JuZZRR16E+uKqe6otSXbZqZJYo69ME3aJmZZSv+0PcNWmZmryv60PcNWmZmbyj60G+oqaT9cBeZ57abmaVb0Yf+jJoquvv6OXisp9ClmJkVXNGHvm/QMjN7Q2pCf2+nQ9/MbMjQl/SApHZJG3LM+1NJIakuq+0uSVskbZJ0Q1b7lZLWJ/O+IEkjtxun9/oNWj7SNzM7qyP9LwM3Dm6U1AxcD2zPapsPLAcWJOvcI6k0mf0lYCUwL3md8p2joWGyB10zMxswZOhHxJPAgRyz7gb+HMi+LGYZ8EhEdEXEVmALsETSTKAmIp6KzGU0DwE35139WagoK2F6dYWv1TczY5h9+pJuAnZGxPODZs0CdmR9bkvaZiXTg9tP9/0rJbVKau3o6BhOiSdprKmi3aFvZnbuoS9pIvAJ4K9zzc7RFmdozyki7o2IxRGxuL6+/lxLPEVjjR+QbmYGwzvSvwhoAZ6XtA1oAn4haQaZI/jmrGWbgF1Je1OO9vMic1eu+/TNzM459CNifUQ0RMSciJhDJtDfGhF7gDXAckmVklrInLBdFxG7gU5JS5Ordm4DHhu53TizhslV7D/aRU9f//napJnZmHQ2l2w+DDwFXCypTdLtp1s2Il4AVgMvAt8D7oiIvmT2x4D7yJzcfRn4bp61n7UZU6qIgI5OH+2bWbqVDbVARNwyxPw5gz6vAlblWK4VuOwc6xsRA9fq7zl8ggtqJxSiBDOzMaHo78iFN+7K9RU8ZpZ2qQp9j79jZmmXitCfNrGC8lKx1336ZpZyqQj9khLRMLnK4++YWeqlIvQhczLXI22aWdqlKPSr3KdvZqmXqtBv9125ZpZyqQr9zq5ejnb1FroUM7OCSU3oz5iSPEzF1+qbWYqlJvQbk4epeL
RNM0uz9IT+lIG7ct2vb2bplZ7Qr/GRvplZakJ/UmUZkyrL3KdvZqmWmtAHaKipdOibWaqlKvRn1PgJWmaWbqkKfd+Va2Zpl7rQb+88QcRpn8luZlbUUhb6lfT0BQeOdhe6FDOzgkhV6M9ILtt0v76ZpVWqQr/h9dB3v76ZpVOqQn/GFIe+maVbqkK/flJm0DXflWtmaTVk6Et6QFK7pA1Zbf9L0kuSfinpUUm1WfPukrRF0iZJN2S1XylpfTLvC5I08rtzZhVlJdRNqnCfvpml1tkc6X8ZuHFQ2xPAZRHxFuBXwF0AkuYDy4EFyTr3SCpN1vkSsBKYl7wGf+d50TC5yt07ZpZaQ4Z+RDwJHBjU9nhEDDyN5GdAUzK9DHgkIroiYiuwBVgiaSZQExFPReYi+YeAm0dqJ87FjCkOfTNLr5Ho0/9d4LvJ9CxgR9a8tqRtVjI9uD0nSSsltUpq7ejoGIES3zBzShVtB4+P6HeamY0XeYW+pE8AvcBXB5pyLBZnaM8pIu6NiMURsbi+vj6fEk/RUlfNoeM9HPQNWmaWQsMOfUkrgPcDvx1vjGvQBjRnLdYE7Eram3K0n3dz66sBeGXf0UJs3sysoIYV+pJuBP4HcFNEHMuatQZYLqlSUguZE7brImI30ClpaXLVzm3AY3nWPiwtdZMA2OrQN7MUKhtqAUkPA+8C6iS1AZ8kc7VOJfBEcuXlzyLi9yPiBUmrgRfJdPvcERF9yVd9jMyVQBPInAP4LgXQNHUCZSVi674jhdi8mVlBDRn6EXFLjub7z7D8KmBVjvZW4LJzqm4UlJeWcOG0iT7SN7NUStUduQNa6qp5pcOhb2bpk9rQ37b/KP39HlffzNIlnaFfX82Jnn6PwWNmqZPO0K/LXLbpfn0zS5tUhv7c5LJNX6tvZmmTytBvrKlkQnkpW30y18xSJpWhL4mWumpfq29mqZPK0IfMyVz36ZtZ2qQ29OfWVbPj4HG6e/sLXYqZ2XmT2tBvqaumrz/YcfDY0AubmRWJVIc+4JO5ZpYqDn3365tZiqQ29GsnVjCtusLX6ptZqqQ29AFftmlmqePQ95G+maVI6kN/7+Eujnb1FroUM7PzItWhP9cnc80sZVId+i1+SLqZpUyqQ3/OdF+rb2bpkurQryovZVbtBF/BY2apkerQB5jrgdfMLEVSH/otddW8su8oEX5erpkVvyFDX9IDktolbchqmybpCUmbk/epWfPukrRF0iZJN2S1XylpfTLvC5I08rtz7lrqquk80cv+o92FLsXMbNSdzZH+l4EbB7XdCayNiHnA2uQzkuYDy4EFyTr3SCpN1vkSsBKYl7wGf2dBeAweM0uTIUM/Ip4EDgxqXgY8mEw/CNyc1f5IRHRFxFZgC7BE0kygJiKeikw/ykNZ6xTUwPNyfQWPmaXBcPv0GyNiN0Dy3pC0zwJ2ZC3XlrTNSqYHt+ckaaWkVkmtHR0dwyzx7MyaOoHyUvlafTNLhZE+kZurnz7O0J5TRNwbEYsjYnF9ff2IFZdLaYmYPd0Dr5lZOgw39PcmXTYk7+1JexvQnLVcE7AraW/K0T4meOA1M0uL4Yb+GmBFMr0CeCyrfbmkSkktZE7Yrku6gDolLU2u2rkta52Cm1tXzbb9x+jr92WbZlbczuaSzYeBp4CLJbVJuh34DHC9pM3A9clnIuIFYDXwIvA94I6I6Eu+6mPAfWRO7r4MfHeE92XYWuqq6e7tZ9drxwtdipnZqCobaoGIuOU0s649zfKrgFU52luBy86puvMk+7LN5mkTC1yNmdnoSf0dufDGaJvu1zezYufQB+onVTKpssyhb2ZFz6EPSHp9DB4zs2Lm0E/4IelmlgYO/URLXTVtB49zoqdv6IXNzMYph37i4hmTiYBNezoLXYqZ2ahx6CcWNtcC8HzbawWuxMxs9Dj0ExdMqaJ+ciXPbXfom1nxcugnJLGwqZbndjj0zax4OfSzXHFhLa/sO8qhYz2FLsXMbFQ49LMsbHK/vpkVN4d+lrc0T0HCXTxmVrQc+llqqsq5qH4Szzv0zaxIOfQHGTiZm3mUr5lZcXHoD7Lowlr2H+2m7aDH1jez4uPQH+SK5CYt9+ubWTFy6A9y8YzJVJaVOPTNrCg59AcpLy3hsllTHPpmVpQc+jksaq5lw85D9PT1F7oUM7MR5dDPYWFzLV29/R5x08yKjkM/h4GTuc+6i8fMioxDP4emqROYVl3hm7TMrOjkFfqS/ljSC5I2SHpYUpWkaZKekLQ5eZ+atfxdkrZI2iTphvzLHx2SWNTsETfNrPgMO/QlzQI+DiyOiMuAUmA5cCewNiLmAWuTz0ian8xfANwI3COpNL/yR8+i5lpe7jjC4RMecdPMike+3TtlwARJZcBEYBewDHgwmf8gcHMyvQx4JCK6ImIrsAVYkuf2R83C5loiYH3boUKXYmY2YoYd+hGxE/gssB3YDRyKiMeBxojYnSyzG2hIVpkF7Mj6irak7RSSVkpqldTa0dEx3BLzsqjJd+aaWfHJp3tnKpmj9xbgAqBa0q1nWiVHW85RzSLi3ohYHBGL6+vrh1tiXqZMLGduXTXP+vGJZlZE8uneuQ7YGhEdEdEDfAv4NWCvpJkAyXt7snwb0Jy1fhOZ7qAxa2GzR9w0s+KST+hvB5ZKmihJwLXARmANsCJZZgXwWDK9BlguqVJSCzAPWJfH9kfdouZa9h3pYtehE4UuxcxsRJQNd8WIeFrSN4BfAL3As8C9wCRgtaTbyfwwfDBZ/gVJq4EXk+XviIi+POsfVQsHRtzc/hqzaicUuBozs/wNO/QBIuKTwCcHNXeROerPtfwqYFU+2zyfLp05mYrSEp5ve43feMvMQpdjZpY335F7BpVlpcy/oIbnfDLXzIqEQ38Ii5prWb/zEL0ecdPMioBDfwiLmms53tPHpr0ecdPMxj+H/hAWz8kMHfTTLfsLXImZWf4c+kNomjqRS2ZM5omNewtdiplZ3hz6Z+H6+Y20bjvAgaPdhS7FzCwvDv2zcN2ljfQH/OCl9qEXNjMbwxz6Z+HyWVNorKnkiRfdxWNm45tD/yyUlIhrL23kyc0dnOgZ0zcRm5mdkUP/LF0/v5Fj3X089Yqv4jGz8cuhf5beMXc6EytK3cVjZuOaQ/8sVZWX8s4317N24176+z3UspmNTw79c3DdpY3sPdzF+p1+hKKZjU8O/XNwzSUNlAj+n2/UMrNxyqF/DqZVV7B4zjT365vZuOXQP0fXX9rIS3s62XHgWKFLMTM7Zw79c3Td/EbAXTxmNj459M9RS101b2qY5C4eMxuXHPrDcP38Rp7eeoBDx3oKXYqZ2Tlx6A/DdZc20tcf/PBXHoDNzMYXh/4wXNFcS92kCnfxmNm449AfhpISce0ljfxoUwfdvX52rpmNHw79Ybp+fiOdXb08vdUDsJnZ+JFX6EuqlfQNSS9J2ijpHZKmSXpC0ubkfWrW8ndJ2iJpk6Qb8i+/cK56Ux0TK0r59rO7Cl2KmdlZy/dI//PA9yLiEmAhsBG4E1gbEfOAtclnJM0HlgMLgBuBeySV5
rn9gplQUcpvXdnEvz6/i/bOE4Uux8zsrAw79CXVAL8O3A8QEd0R8RqwDHgwWexB4OZkehnwSER0RcRWYAuwZLjbHws+fFUL3X39fOVn2wtdipnZWcnnSH8u0AH8i6RnJd0nqRpojIjdAMl7Q7L8LGBH1vptSdspJK2U1CqptaOjI48SR1dLXTXXXtLAV3/2qp+oZWbjQj6hXwa8FfhSRFwBHCXpyjkN5WjLOTB9RNwbEYsjYnF9fX0eJY6+269uYf/RbtY85759Mxv78gn9NqAtIp5OPn+DzI/AXkkzAZL39qzlm7PWbwLGfVK+46LpXDJjMg/8ZCsRfriKmY1tww79iNgD7JB0cdJ0LfAisAZYkbStAB5LptcAyyVVSmoB5gHrhrv9sUISv3t1Cy/t6eSnL/vyTTMb2/K9eucPga9K+iWwCPhb4DPA9ZI2A9cnn4mIF4DVZH4YvgfcERFF0RF+08ILqJtUwf0/3lroUszMzqgsn5Uj4jlgcY5Z155m+VXAqny2ORZVlZfy22+fzefXbubljiNcVD+p0CWZmeXkO3JHyK1LZ1NRWsKXf7Kt0KWYmZ2WQ3+E1E+uZNmiC/jGM228dqy70OWYmeXk0B9BH76qheM9fTy8bsfQC5uZFYBDfwTNv6CGX7toOg89tY2ePo++aWZjj0N/hN1+dQu7D53guxv2FLoUM7NTOPRH2DUXN9BSV80/rN3ssfbNbMxx6I+wkhLxifddyub2I/zzf7xS6HLMzE7i0B8F181v5H2Xz+Dzazezdd/RQpdjZvY6h/4o+dR/XkBlWQl/8a31HpPHzMYMh/4oaaip4s73XsJTr+znm7/YWehyzMwAh/6ouuVtF7J49lQ+/W8vsv9IV6HLMTNz6I+mkhLxdx+4nKNdvXz63zYWuhwzM4f+aJvXOJmPvfMiHn12J/+xeew+BczM0sGhfx78wTVvYm5dNZ94dAPHu4tiNGkzG6cc+udBVXkpf/uBy9l+4BifW/urQpdjZinm0D9Pls6dzvK3NXPvk6/wnV+O+6dEmtk45dA/jz510wIWz57KH3/9OX6yZV+hyzGzFHLon0dV5aXcd9vbmFs3iZUPtbJh56FCl2RmKePQP8+mTCznoduXUDuxgg/9yzq2eZgGMzuPHPoF0FhTxUO3L6GvP7jtgXW0d54odElmlhIO/QK5qH4S//LhJXR0drHigZ9z+ERPoUsysxRw6BfQouZavnTrW9m8t5OVD7VypKu30CWZWZHLO/QllUp6VtJ3ks/TJD0haXPyPjVr2bskbZG0SdIN+W67GLzr4gY++8GFrNt6gJu++GM27eksdElmVsRG4kj/j4DsgWXuBNZGxDxgbfIZSfOB5cAC4EbgHkmlI7D9ce/mK2bxtd9byuHjvdz8jz/h0WfbCl2SmRWpvEJfUhPwG8B9Wc3LgAeT6QeBm7PaH4mIrojYCmwBluSz/WKydO50/v3jV3N50xT++OvP8xePrudEj4dsMLORle+R/ueAPweyHwbbGBG7AZL3hqR9FrAja7m2pO0UklZKapXU2tGRnkHKGmqq+NpH3s5H3zmXrz29nQ/+01PsOHCs0GWZWREZduhLej/QHhHPnO0qOdpyPlIqIu6NiMURsbi+vn64JY5LZaUl3PXeS7n3d65k2/6jvP8ffsxXn36Vnj4/ZN3M8pfPkf5VwE2StgGPAO+W9BVgr6SZAMl7e7J8G9CctX4T4EFoTuM9C2bwnT+8mjc3TuITj27gPXc/yb/9crcfvWhmeRl26EfEXRHRFBFzyJyg/X5E3AqsAVYki60AHkum1wDLJVVKagHmAeuGXXkKzJ5ezeqPvoP7bltMeam442u/4KYv/oQfb/a4PWY2PGWj8J2fAVZLuh3YDnwQICJekLQaeBHoBe6ICJ+pHIIkrpvfyDWXNPDoszu5+4lfcev9T3P1m+r4g3ddxNK50ykpydVzZmZ2Ko317oLFixdHa2trocsYM0709PHVp7fzxe9v5uCxHmbVTuA33zqLD7y1iTl11YUuz8zGCEnPRMTiU9od+uPT8e4+Hn9xD994po0fb9lHBLxtzlR+861NvPfymUyZUF7oEs2sgBz6RWz3oeM8+uxOvvlMGy93HKVEcHlTLb920XSuuqiOK2dPZUKF74MzSxOHfgpEBM+3HeL7L7Xz0y37eG7Ha/T2BxWlJVxxYS1vnzud+TNruHTmZJqnTvS5ALMi5tBPoaNdvfx82wF++vJ+fvryPl7YdZiBv+7qilIunjGZS2bWcMmMyTRPm0jz1Ik0TZ1AVbn/V2A23jn0jePdffxqbycv7TnMxt2dbNx9mJf2dHLo+MnDOtdNqqRp6gSapk6gYXIV0ydVUD+pkumTKpg+qZK6SRXUTqyguqIUyf9bMBuLThf6o3HJpo1REypKWdhcy8Lm2tfbIoL2zi7aDh6j7eBxdhzIvLcdPM6GnYfo6GznaHfuK2tLBDUTyplcVUZNVeZ9UmU5EytKmVhRyoTkfWJFGRPKS6koK6GyrCR5L6Uy+VxWWkJZqSgvKaG8TJSVlFBeKkpLMtOlJXr9VZa8S1AqUSK5m8rsHDj0U04SjTVVNNZUceXs3Msc7+5j35Eu9h/tZv+RLvYd6eLQ8R4OH+/l8IkeOk/0cvh4D4dP9NB28Bgnevo41t3H8e4+jvX00dc/+v+bLC0RJcrsT4lAZN5LJEjek8nXl4E32jJ/Fpn1Tm574wdFyrxe/5wsNdB2up+ek77jpBmnWf6s9vj02xjrxk+lhfedj19NZdnIdrc69G1IEypKM33+0yae87oRQXdfPye6++nq66Orp5+u3n66e/vp6u2jq7ef3r6gpz957+unpy8z3dcf9EXQ2x/09fXTF9Db109/QH8E/cn8/v6gP6AvgojMNvuT6YFlgdfbgjfmDQz/lFnvjXkDc96YjpNGioqs/cv+fOr+n7pO9nqnLH+2f7B5r1QYMZ6KHQM0Cj+RDn0bVZKSrpxSwPcOmBWaH5doZpYiDn0zsxRx6JuZpYhD38wsRRz6ZmYp4tA3M0sRh76ZWYo49M3MUmTMD7gmqQN4dZir1wFpfKCs9ztdvN/pcrb7PTsi6gc3jvnQz4ek1lyjzBU773e6eL/TJd/9dveOmVmKOPTNzFKk2EP/3kIXUCDe73TxfqdLXvtd1H36ZmZ2smI/0jczsywOfTOzFCnK0Jd0o6RNkrZIurPQ9YwmSQ9Iape0IattmqQnJG1O3qcWssbRIKlZ0g8kbZT0gqQ/StqLet8lVUlaJ+n5ZL//Z9Je1PsNIKlU0rOSvpN8Lvp9BpC0TdJ6Sc9Jak3ahr3vRRf6kkqBfwTeC8wHbpE0v7BVjaovAzcOarsTWBsR84C1yedi0wv8SURcCiwF7kj+not937uAd0fEQmARcKOkpRT/fgP8EbAx63Ma9nnANRGxKOv6/GHve9GFPrAE2BIRr0REN/AIsKzANY2aiHgSODCoeRnwYDL9IHDzeS3qPIiI3RHxi2S6k0wYzKLI9z0yjiQfy5NXUOT7LakJ+A3gvqzmot7nIQx734sx9GcBO7I+
tyVtadIYEbshE45AQ4HrGVWS5gBXAE+Tgn1PujmeA9qBJyIiDfv9OeDPgf6stmLf5wEBPC7pGUkrk7Zh73sxPhg91+PjfV1qkZI0Cfgm8N8j4rCU66+/uEREH7BIUi3wqKTLCl3TaJL0fqA9Ip6R9K5C11MAV0XELkkNwBOSXsrny4rxSL8NaM763ATsKlAthbJX0kyA5L29wPWMCknlZAL/qxHxraQ5FfsOEBGvAT8kc06nmPf7KuAmSdvIdNe+W9JXKO59fl1E7Ere24FHyXRhD3vfizH0fw7Mk9QiqQJYDqwpcE3n2xpgRTK9AnisgLWMCmUO6e8HNkbE/8maVdT7Lqk+OcJH0gTgOuAlini/I+KuiGiKiDlk/j1/PyJupYj3eYCkakmTB6aB9wAbyGPfi/KOXEnvI9MHWAo8EBGrClzSqJH0MPAuMsOt7gU+CXwbWMI9dFsAAACBSURBVA1cCGwHPhgRg0/2jmuSrgb+A1jPG/28f0GmX79o913SW8icuCslc9C2OiL+RtJ0ini/ByTdO38aEe9Pwz5Lmkvm6B4y3fFfi4hV+ex7UYa+mZnlVozdO2ZmdhoOfTOzFHHom5mliEPfzCxFHPpmZini0DczSxGHvplZivx/kcKo7F+XBZwAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(loss_history)\n", + "plt.title('Loss per epoch');" + ] + }, + { + "cell_type": "code", + "execution_count": 269, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfXxcVb3v8c9vJkkr8hQKSGlKU6TQUqClSWuxVAGL8mQRCoeC0lbl4SDeq0de3AMHBUU5elTOBY/1cBC0KAUOaKEVEHn0WoGQJrSVPkopqYlUKSFAe4EmmVnnjz2TTiYzyWSe9p6Z7/v16iuZPXsyK1G+e81vrb2WOecQEZHKEvK7ASIiUnwKfxGRCqTwFxGpQAp/EZEKpPAXEalACn8RkQqk8BcRqUAKfylpZtZmZt1mdmDS8TVm5sysPva4zsx+bWZvmNnbZvaSmS2KPVcfO3dX0r8LMmzD982s3czeMbNtZnZd0vO3m9lmM4vG3zPhuduS3nO3me0c5L3CZvYdM3vNzHaa2Woz2z+TdookUvhLOXgVuDD+wMyOBT6QdM4vgXZgHDAKWAD8Pemc/Z1zeyf8++8M3/9OYKJzbl/go8BFZnZuwvNrgS8BLya/0Dn3j4nvCdwLPDDIe30r9h4nAPsCFwPvZ9hOkT5VfjdAJA9+iRfm/xF7vBD4BfCdhHOmA//knPv/scer8/XmzrnNSYeiwBEJzy8GMLNBQ9rMPgjMA85K83wt8FVginNuW+zwuiybLRVOPX8pB03AvmY2yczCwAXA3SnOWWxm883ssOH8cDO7yMz+NMQ515jZLqAD+CBwz3DeI2YesAP4Q5rnjwV6gfPM7G9m9mczuzKL9xFR+EvZiPf+TwU2AX9Nev58YCXwDeDV2JjA9KRz3jCztxL+TQJwzt3jnDtusDd3zn0P2AeYFmvL21n8DguBX7j0C27VAfsBRwLjgfOAb5rZqVm8l1Q4hb+Ui18CFwGL8Eo+/Tjnupxz1zjnJgMfAtYAD5mZJZx2oHNu/4R/G4fTAOdZDbyHV5vPmJmNBT6equ0J3ot9vdE5955z7k/AfcAZw3kvEVD4S5mI1cBfxQvCZUOc+wbwQ+BQ4IACNKcK+PAwX7MAeM45t3WQc+KlJy3FKzlT+Es5+SJwSsKgbh8z+zczO8bMqsxsH+AKYItzrjOXNzSzkJldbma15pkBXAk8lXBOjZmNBAyoNrORZpb8394CYMlg7+WcewWvdHWdmY2IlaUuAB7O5XeQyqTwl7LhnHvFOdeS5um9gAeBt4CteFM+5yad81bSnPuvAZjZZ81s/SBvfQ7wCrATb6D5P9gz8wjgcbySzUeB22Pffyz+pJmdgFfPHzDF08x+a2b/knDowljbO4FHgG84555Kfp3IUEybuYiIVB71/EVEKpDCX0SkAin8RUQqkMJfRKQClczaPgceeKCrr6/3uxkiIiWltbX1DefcQcnHSyb86+vraWlJN4tPRERSMbNtqY6r7CMiUoEU/iIiFUjhLyJSgUqm5p9KT08PHR0dvP9+eW9kNHLkSOrq6qiurva7KSJSJko6/Ds6Othnn32or6+n/8q85cM5R2dnJx0dHYwfP97v5ohImSjpss/777/PqFGjyjb4AcyMUaNGlf2nGxEprpIOf6Csgz+uEn5HkVLTuq2Lxc9soXVbl99NyUpJl31ERPzQuq2Lz97RRHdvlJqqEEsvmUnDuFq/mzUsJd/zD5JvfvOb/PCHP0z7/EMPPcSGDRuK2CIRKYSmrZ1090aJOujpjdK0Nac9gXyh8C8ihb9IeZh5+ChqqkKEDaqrQsw8fJTfTRq2igv/fNfpbrrpJo466ijmzJnD5s2bAfjpT3/K9OnTmTJlCvPmzePdd9/lueeeY8WKFVx99dVMnTqVV155JeV5IhJ8DeNqWXrJTL72yaNKsuQDFRb+8TrdzY9v5rN3NOV8AWhtbeW+++5j9erVLFu2jFWrVgFw7rnnsmrVKtauXcukSZO48847+ehHP8rcuXP5wQ9+wJo1a/jwhz+c8jwRKQ0N42q58uQjSjL4ocIGfFPV6XL5H27lypWcc8457LXXXgDMnettCbtu3Tq+/vWv89Zbb7Fr1y4+9alPpXx9pueJiORbRfX8C1GnSzUNc9GiRfz4xz/mpZde4oYbbkg7Rz/T80RE8q2iwj/fdbqPfexjPPjgg7z33nvs3LmT3/zmNwDs3LmT0aNH09PTw9KlS/vO32effdi5c2ff43TniYgUWkWVfcC7AOSrRjdt2jQuuOACpk6dyrhx45g9ezYA3/72t/nIRz7CuHHjOPbYY/sCf/78+Vx66aX86Ec/4le/+lXa80RECs2cc363ISONjY0ueTOXjRs3MmnSJJ9aVFyV9LuKSP6YWatzrjH5eEWVfURExJOX8Dezn5nZ62a2LuHYAWb2hJm9HPtam/DctWa2xcw2m5mmuIiIFFm+ev5LgNOSjl0DPOWcmwA8FXuMmR0NzAcmx17zEzML56kdIiKSgbyEv3PuD8CbSYfPBu6KfX8X8JmE4/c553Y7514FtgAz8tEOERHJTCFr/h9yzm0HiH09OHZ8DNCecF5H7JiIiBSJHwO+qRanTznlyMwuM7MWM2vZsWNHgZslIlI5Chn+fzez0QCxr6/HjncAYxPOqwNeS/UDnHO3O+canXONBx10UAGbGhx77723300QkQpQyPBfASyMfb8QWJ5wfL6ZjTCz8cAEoLmA7fBdJBLxuwkiIv3ka6rnvcDzwFFm1mFmXwS+B5xqZi8Dp8Ye45xbD9wPbAAeA650zhUvHdubYeXN3tc8aGtrY+LEiSxcuJDjjjuO8847j3fffZf6+npuvPFGTjzxRB544AFeeeUVTjvtNBoaGpg9ezabNm0C4NVXX+WEE05g+vTpfOMb38hLm0REhpKX5R2ccxemeeoTac6/CbgpH+89LO3NcNdciHRDuAYWroCxuU802rx5M3feeSezZs3iC1/4Aj/5yU8AGDlyJH/84x8B+MQnPsFtt93GhAkTeOGFF/jSl77E008/zVe+8hWuuOIKFixYwOLFi3Nui4hIJirrDt+2lV7wu4j3tW1lXn7s2LFjmTVrFgCf+9zn+gL/ggsuAGDXrl0899xznH/++UydOpXLL7+c7du3A/Dss89y4YXetfPiiy/OS3tERIZSWQu71c/2evzxnn/97Lz82ORlneOPP/jBDwIQjUbZf//9WbNm
TUavF5HS07qti6atncw8fFRJbPBSWT3/sTO8Us8p1+Wt5APwl7/8heeffx6Ae++9lxNPPLHf8/vuuy/jx4/ngQceAMA5x9q1awGYNWsW9913H4CWdRYpUfneJbAYKiv8wQv82VflLfgBJk2axF133cVxxx3Hm2++yRVXXDHgnKVLl3LnnXcyZcoUJk+ezPLl3uSnW2+9lcWLFzN9+nTefvvtvLVJRIon1S6BeZHnCSqJKqvsUyChUIjbbrut37G2trZ+j8ePH89jjz024LXjx4/v+9QAcM011xSkjSJSOPFdAnt6o3nbJbBQE1TiFP4iIjmK7xLYtLWT2r1q+nr+OdX+U01QUfgHR319PevWrRv6RBEpKcMdwI2f89k7mujujVJTFcptu9gCTVCJK/nwd86V/WyZUtltTaRcxAdwhxviqWr/WYd/fIJK20ov+PPY64cSD/+RI0fS2dnJqFGjyvYC4Jyjs7OTkSNH+t0UkYqRbYjnvfY/dkbeQz+upMO/rq6Ojo4Oyn3Fz5EjR1JXV+d3M0QqRrYhnlj7D/p8/5LewF1EpFCCcNNWPtqQbgP3ku75i4gUSsO4Wl977tmOO2Sq8m7yEhEpAQW7cSxG4S8iEkDxcYewkb8bxxKo7CMikk/tzbD2HsBgyoVZz9Yp9OCxwl9EJB/iof/i3RDt8Y6tXgqLHs7pAlCocQeFv4hIrlqWwKNXQbS3//ECLMuQL6r5i4jkor05dfBDQZZlyBf1/EXEd0GYUz8siXV9HLjonucsDEedDnsfnFPNv9AU/iLiq0LPZ8+79maiPz8Ti3YDYKFqCFV5PX8LwRk3Q+Mif9uYAYW/iPgqr4uhFcFf1zzOIZEeQrHlxFy0F2v8POxXV5AF2ApF4S8ivhruOjp+l4iejxzNpwkTcl6NPxKqpirA5Z10FP4i4qvhzGcvaomovTnlcsrjjz+Zha3X82n3/wiFjOPPuIKJJRb8oPAXkQAYaj57vLf/2lvvFadENMgWig3jarn6kgU0bT2TmYePYmKAS1SDUfiLSKAl9varQkZVOEQkkse9clMZYgtFvxd9yweFv4gU3XDq9okDwpGo44IZYxmz/wcKW/Mf5haKfo9DZEPhLyJFNdy6ffKA8LxpdYUP2GFsoVhyU1VjFP4iUlTDndrp2+5YGW6hWGpTVeMU/iJSVNlskRjkGnve9+0tEm3jKCJFl1wjz7Rmnu48v2vufr//YLSNo4gERmJPPtOaebrzglBzD/Ink3S0qqeI+CrT7QrTnVfo7Q7LVcF7/mbWBuwEIkCvc67RzA4A/huoB9qAf3DOdRW6LSISPJnWzNOdV6o1d78VvOYfC/9G59wbCce+D7zpnPuemV0D1Drn/nmwn6Oav0j5KvWaf5AFreZ/NnBS7Pu7gN8Dg4a/iJSvTGvm6c5rCL1MQ9VKCM0GSm+dHT8UI/wd8LiZOeC/nHO3Ax9yzm0HcM5tN7ODU73QzC4DLgM47LDDitBUESkp8U1VVt/jraeftA6PpFeM8J/lnHstFvBPmNmmTF8Yu1DcDl7Zp1ANFJES1LdvbgSvj0lg9swthTJUwcPfOfda7OvrZvYg3meyv5vZ6FivfzTweqHbISJlIN7T37UDNv/WW3itjwViz9wgTD3NREHD38w+CISccztj338SuBFYASwEvhf7uryQ7RCREhcP/RfvhmjPwOctDA0LA7Fnbqks91Donv+HgAfNLP5e9zjnHjOzVcD9ZvZF4C/A+QVuh4hkIJDlilTlnT4GoXCg9s0tlamnWt5BRIDilCuGfXFpb4afn+4N5iYLVcO0z8GUi3zv7ScL0kU0aFM9RSQAEkOq0OWKrC4ubSvBRfc8tjAcdTrsfXAgSjzplMJyDwp/kQqVHMbXnzW5oOWKrC4u9bMhPAIiu8FCgSrvlDqFv0iFSg7jrne7C7pu/qC18CdugI0rYNJcOPVbe44PY1MVGR6Fv0iFShXGhSxXpN2U5YkbcM/e4n3/7C0YDLwAKPTzTgO+IhUsXvOv3auGrne789bjH3LAs725rzf//v2XMuKdNszAOdi9bz0jr1qbcxvEowFfkQII0qyObMTbnM9ZPoMO7KZYjuHV2pOY+E4b8X7ouv0+zoCkkrxT+ItkqVTu5BzKshc72N0TxZGfWT5pB3bbm+GuudD7PonLMex72DH89G9VnGrNPOFm0JBY8pGCUfiLZKlU7uQcTOu2Lh5oae+7dSoczn2WT9qB3baV3to7fe/mLccwZuonaTjuSh4t4U9QpUjhL5KlUrmTczBNWzvpjXphbMCkQ/bJ+Wc2jKvlobnVdG14mtqjT2FiPMzrZ3tr70S6vbtyj/9c31z9BlDoF5kGfEVyUOo1/8TSVdR5F4AR1TmWsOLlnUj3wCWWEwZ6NYOnONIN+GoPX5EcNIyr5cqTjyjJ4Ic90y9nHXEgBv3q/llpb4bffxd6d3srbsaXWI4bOwNmX6XgDwCFv0iFaxhXy1fnHMmI6hBhI/sSVrzHv/X3QBQIBWKJZUlNNX+RChcvXS06oZ7129/h9GNGZ77oWmIJJz6g66LeUgyHnwQnXatefkAp/EUqWKqa/6q2NznqkH3SXwDam2HtvbD6bm+Z5XhdP3FAN1yj4A84hb9IBUucrgoMPde/b239hCWW43X92VcVdR2eUh9s95vCX6SE5RqA8emq8QtAiEFq/u3NA4M/aevE1ugEmnoPYGZ0FA3Z/UoZKZcb7Pyk8BcpUfkIwMTF1oZc3yfV2voNC/o2UylmIJfDDXZ+U/iLlKjEAOzuiXLLk3/mq3OOzOoCkNFrhlhbv1CBnOrTTTncYOc3hb9Iiardq4aQGc45osCzW95gVdubhetxx9bW/+uax3k+cjTjDzq5X2mnEIGc7tNE2uWhJWMKf5ES1LqtixsfXk/UOSx2d1bOPe4nboCX7ofa8TDnmykHbFujE/hss9fDr2lt6nehKUQgD/ZpohS2Sgwyhb9IARR6JkpiKIYMwiHvE0BWPe6WJfD7f4Vdf/cev/Ma/Ow0+MJjAy4AQ5V28h3IKu8UjsJfJM/SlSpyvSAkvj45FK8/a3J2m7H8+lKvt5/MRbwB3qTwL3YYD/VpQtM9s6fwF8mzVL1jyG3DlFQXlJxKLO3N8OytsOnh1M9bOO2yDPOm1eFiX4sRuOk+TWi6Z24U/iJ5lqp3PJyZMKl6s6leP9wF5Vq3dfHq6mf4+HtPcNCWX3uzdlI55Fg4898H9PqTw3betLqM37sQNN0zNwp/kTxLV6rIpFySrjeba7mldVsXD93xHa4P/ZwQEZx5Szn0kyb044IWthoPyI3CX6QAkksVyRcEgMXPbBlQskkXsFnPpGlZAhuX43aP5frQ3VQR8TZKB8AgVAWjj4PjF/Sbs59K0MJW0z1zo/AXKZJ4iA9Wqx4sYIc1kyZW03exmn4DXuCbgXPgLIw1LmTTh87kqV31zDxo6OUYghi2mu6ZPYW/SJGlGxCOh2omnxAGFVtX3/W+B25P4GMhogChEKEzb6b1oLNjF6HNGQ+YKmzLh8JfpMiSe/e1e9UM+CRw5clHZD+
bJbaufnxnrvhOrS/WXUzjUeP6VtxsemZLoGr4UlwKf5EcZDPPPLl8kq7On9EAa3szrL0Hdu2AvQ/2NkSPravvIt30OFgfHccyTuEzp34dEl4ftBq+FJfCXypetjcK5TLPPLl8kiqEhwznliXwyD/1X2lz9VJY9DAsXIG1rWTryCk8t6uez6T43YJYw5fiUfhLRcslwIeq3Wf7SSCjtXLam+GRr/UPfui/scrYGUwEJg7x3gr9yuRb+JvZacCtQBi4wzn3Pb/aIpUrl7nrmdTus/0kMNTx5LX1Y2V9bIgN07UcgsT5Ev5mFgYWA6cCHcAqM1vhnNvgR3ukcuVS9860dp+z+EbpHxgF73V64d5X198NDiLAM66RsWdey8Q0N2kN9SlHF4bK4lfPfwawxTm3FcDM7gPOBhT+UlS51r0zqd3npG/P3Ahe/z4EVSO8vXIXPcy6R2/jT+1v8evIbNZyJF/bVZ+2zDPYxUnr5FQev8J/DNCe8LgD+EjySWZ2GXAZwGGHHVaclknFyVfdO+8DqCn3zI32q+t3nzaBb9/RRA9DX3AG+5QTtKUbpPD8Cv8By4qwp2y554BztwO3AzQ2Ng54XiRo8nIhiZd53m6HaNKALqF+G6YP54Iz2Lma9ll5/Ar/DmBswuM64DWf2iJScEPW0xPr+o9d4/XuQ1UQroJIL4RCcMKXYeS+fTdpxQ3ngjPYwLKmfVYWv8J/FTDBzMYDfwXmAxf51BaRghqynt5X1496Ie+i3r8o0LAQ9qsbEPj5aFOqqaUK/crhS/g753rN7MvA7/Cmev7MObfej7aIFFrT1k5290RxQHdPUj09ua4fdRAKA+aVd6ZcmNfQBw3uise3ef7OuUeBR/16fykfQZ+iWLtXTd+AVjT2eE9dv6P/jVqhMJxx854pnXkOftDgrnh0h6+UtFLoxXa9203I4B/sKU4PN+NWHQdvLYvV9cMQqoZoD1jIC/6kdfXzfXHT4K6Awl9KXCn0YmcePoqfh7/Lx8IveQd2vIQzw3AQhR1HXsCW7lpqjz6FiY1z+r0224vbYBeMfA3uBv0TlwxO4S8lrRR6sQ1/vgVX9VK/tfUdhlmIaKia/7VhIs29R1CzpYelB3dltLPXYDK5YOQ6uFsKn7hkcAp/KWmBnaL4xA2wcQVMmut9JWFTFeDvx17G6IMPZlnneJpfGJE23LO5uBXj01ApfOKSwSn8peQVY4pixiWO+Gqbf4uVeJ69BQ5t6NtYBYO3D/0Yo+f9GwDjt3VR09qUNtyzubgV49NQKXziksGZc6Vx42xjY6NraWnxuxlSgTIucbQ3w5IzIdLt7ZdLbN/cAw7f8wlg0lw49VsDfn6+P7nEf2btXjV0vdtdkE9FqvmXBjNrdc41Jh9Xz19kCBmXONpWQqQHiAV/rF+1fcwnGX3qtwaEflwhPrnEf14h6/K6Kay0hfxugEjQxUscYWNgiaNlCfzyHO9r/WwIV/ftm+uAByOzWHbApb60O91mMyKgnr9IWolljevPmsxv123n9GNG7+nttiyBh7/iff/K03DWrbDoEd744895cuPr/Lr3RNaFJ7LUp3q46vIyGIW/SAr3vPAXrl++jkjUETKwkDHFbeZv2zaxKXQ+E6fPgY3L+79o43JoXMRBF87gyG1dnLy1k2t9rIcHdiaUBILCXyRJ67Yurl++jt6oV7SPOPg/LOUfqx/BcEQf/RUc8luYdLbX44+bdHbft0GphwelHRI8Cn+RBK3burjlyT/3Bf80+zOXhR/mU2FvppkZhFwvPHsrzF/qvWjjci/4k5ZlKFZ71bOXbCj8RWISp3TGQ39O+EXCeAuvWWwLIgPYud170LjIl9AH3WUruVH4SyAEoQfbtLWTyZFNXBp+mDnhVsLm9f7j8/UT74ix4xf40cR+dJet5ELhL74LSg/2E3u3cUn1d6jBW1u/316jFqIjcgDvuhHczel85qCzaSh6C/vTbB7JhcJffOd7D7ZlCWxczsTqvXAWGbjBtIV55ohruGTdZKIOwgaHDNHGYnyS0WweyYXCX3znSw82vpnK++946+/EmIXBRWIPQnDUGTDrK+wXnUDNpvRr8CRKnCY6orqwn2Q0m0eypfAX3xW9B9veDHfN9TZTIWltq0OnwujjAOu3hWIDZNTG5GmiA7ZtFAkIhb8EQmIPtqAlk/Zm+P13IbI7tn1i0gonxy9IO3snk15209ZOogmLJYZCplq8BJLCXwommxAvyOBvezOsvRd2vQ4vPw6RXiDqlXXCI+Ajl8Pf/pSXufrxElZ3b5SQGTeefYx6/RJICn8piGxDPO+Dv+3NsOQsr6cPCUsth7DDT4KTrs3rJukahJVSofCXgsg2xPM++Nu2Mlbbj3EQweimim0Tr2RiHoM/ToOwUgoU/lIQ2YZ43nvO9bMhXAOR3TighzD3R07ioehsTt5Vz8TcfrpIydJOXlIwRb9rNz59s352/1JOezOv/3EJW3fs4t9fn0ZrZALVg5SignC3sUi+aCcvKbqilT/iA7qr74ZoxOvpL1zRdwFojU7gsxs+TXdvlKpwiAtm1DFvWl3a4E83VqGLgpQThb+Utvic/d736ZuzH+n2PgHEwj9x/CESiTJm/w+kDe90YxWt27q48Kd7bvK691ItoialTds4Sk5at3Wx+JkttG7rKs4btjfDypu9r5AwoBsvX5rX86+f3feSQbdhTJLu3GUvdtDdG8UB3b1Rlr3YUZjfT6RI1POXrOU6J3/YZZTEO3PjpZ2+Ad1uCFXB8RfBlIv61fyHGkRObkeqc5NHxkpjpEwkPYW/ZC2XOflZXTjivXwX2VPamX2VdxFINdCbIN34Q7p2JJ87b1odv2pppyfiqA4b86bVZfR7igSVwl+ylsuc/KwuHIm9/MTSztgZWd+olWk7GsbVcu9lJ2jAV8qGwl+ylsuc/EEvHOmmbI6dkVEvfziGcwHTzVtSTgo2z9/MvglcCuyIHfoX59yjseeuBb4IRID/7Zz73VA/T/P8y0/Kmn+qun6OIT/U2IKmcEo582ue//91zv0wqSFHA/OBycChwJNmdqRz8UXUpVL060nHe/tvdwys6yeE/3CDOl1NP/nnKPSl0vhR9jkbuM85txt41cy2ADOA531oiwRBYm8/FPZm7UQZMGUzm0HiVDV9oOjbRurThQRNocP/y2a2AGgBrnLOdQFjgKaEczpix6TS9PX22/f09qNAwwLYb+yAun42g8SpavrF3jYyKHsUiyTKKfzN7EngkBRPXQf8J/BtvCnR3wZuBr4AA7dIJc20aTO7DLgM4LDDDsulqRI0/Xr7VV6PP97bT5qnH5fN7KJ0g9LF3DbS9z2KRVLIKfydc3MyOc/Mfgo8HHvYAYxNeLoOeC3Nz78duB28Ad/sWyqBkba3vxD2qxtyrn42s4uSa/rFXnPflz2KRYZQyNk+o51z22Pf/xPwEefcfDObDNyDV+c/FHgKmDDUgK9m+5SweOB/YBQ8ds2e3j4u5UJs5Ug1f/GLH7N9vm9mU/FKOm3A5QDOufVmdj+wAegFrtRMnzKVvNqmmbdvro
tm3NsvF5pRJEFTsPB3zl08yHM3ATcV6r0lAFKttulCEArRt/jalAvLPvRFgkp3+EphpFpts2oEnPY9eK+zInr7IkGm8JfCyGC1TRHxj8JfclPEdXiGokFVkcwp/MuEL8E31Do8Oay2OVy6kUpkeBT+ZcC34Eu1vv4QYV+oi5RupBIZHoV/GfAt+NKtr59GIS9SupFKZHgU/mWg4MEXn6+P6z9oO8y6fiEvUsW+a1ek1Cn8y0Chgq91Wxevrn6Gc/90ORbtBsC9uJTQ5x/pfwHIsK5f6IuUbqQSyZzCv0zkO/gef2wFO55dwtG0Qagbiy3HF4308Nc1jzMmi4Fc9c5FgkPhLwNsWvUkH3/+89SEevuOxZeA6iXM85GjOS/Ln63euUgwKPxlgK4NT3MkvX29/YiDl9yHWR+t5zf2ca4+/mR/GygiOVP4ywC1R59Cz9b/osZ5PX8XqmHkGd/nrV31XK1yjUhZUPjLABOnz2ET/03Pi/dwyL4jOOjEzzNx7AwmZvnzdOetSPAo/CWlidPnwPSM9uoZlO68FQmmkN8NkPKWbgN1EfGXwl8KKj63P2zozluRAFHZRwpKc/tFgknhLwWnuf0iwaOyTzlqb4aVN3tfRURSUM+/XMQ3VfnAKHjsmvRr7GdBUzVFyo/CvxwkbqpiBtEoEM14jf3BaKqmSHlS2accJG6q4qJELUSUMNFQ9ZBr7A9FUzVFypN6/uUgYVOVaKiaG7o/x75uJ62RyVwdnUBDDj9amx11xVcAAAYnSURBVKSIlCeFf4BkXVtP2FRlWed4lr4wgqiDsJHzhimaqilSnhT+AZFYW68KhzivoY550+oAMgve2KYq47d1UdPalNeeuqZqipQfhX9AJNbWu3uj3PvCX/hVSztT7WWms54fPD2Zqy9ZMGQIq6cuIplQ+AdEvLa+uyeKAxxwTHQzd9X8K9X00sODPLJ6LA3jzh3yZ6mnLiJDUfgHRLzHvuzFDl5ufYrpbGCMvUE1vVRZFFwvJ4Q3AEOHv4jIUBT+AdIQepmG6nuJ1twN0V4IVQFVRKMRQlXVjJn6Sb+bKCJlQuEfFPEbtXrfJ0Rsw1wXgYYFsN9YbzpnjnfqiojEKfyDIn6jVjz4MW/u/pSLFPoikncK/6BIuFGLUBUcf5GCX0QKJqflHczsfDNbb2ZRM2tMeu5aM9tiZpvN7FMJxxvM7KXYcz8yM8ulDWUjfqPWKdfBoofhrFsU/CJSMLn2/NfhTT/5r8SDZnY0MB+YDBwKPGlmRzrnIsB/ApcBTcCjwGnAb3NsR3mI3aglIlJoOfX8nXMbnXObUzx1NnCfc263c+5VYAsww8xGA/s65553zjngF8BncmmDiIgMX6FW9RwDtCc87ogdGxP7Pvl4SmZ2mZm1mFnLjh07CtJQEZFKNGTZx8yeBA5J8dR1zrnl6V6W4pgb5HhKzrnbgdsBGhsb054nIiLDM2T4O+fmZPFzO4CxCY/rgNdix+tSHBcRkSIqVNlnBTDfzEaY2XhgAtDsnNsO7DSzmbFZPguAdJ8eRESkQHKd6nmOmXUAJwCPmNnvAJxz64H7gQ3AY8CVsZk+AFcAd+ANAr+CZvqIiBSdeZNugq+xsdG1tLT43QwRkZJiZq3Oucbk49rDV0SkAin8RUQqkMJfRKQCKfxFRCqQwl9EpAIp/EVEKpDCX0SkAin8RUQqkMJfRKQCKfxFRCqQwl9EpAKVf/i3N8PKm72vIiIC5L6Hb7C1N8NdcyHSDeEab4N07ZErIlLmPf+2lV7wu4j3tW2l3y0SEQmE8u75188mGqqGCBCqJlQ/2+8WiYgEQlmHf2t0Aj/o/hca3HpaI5O5OjqBBr8bJSISAGUd/k1bO2nuPYImdwRh8x43jKv1u1kiIr4r65r/zMNHUVMVImxQXRVi5uGj/G6SiEgglHXPv2FcLUsvmUnT1k5mHj5KvX4RkZiyDn/wLgAKfRGR/sq67CMiIqkp/EVEKpDCX0SkAin8RUQqkMJfRKQCKfxFRCqQOef8bkNGzGwHsM3vdhTYgcAbfjciQPT36E9/j/709+gv3d9jnHPuoOSDJRP+lcDMWpxzjX63Iyj09+hPf4/+9Pfob7h/D5V9REQqkMJfRKQCKfyD5Xa/GxAw+nv0p79Hf/p79Desv4dq/iIiFUg9fxGRCqTwFxGpQAr/gDGzH5jZJjP7k5k9aGb7+90mP5nZ+Wa23syiZlax0/rM7DQz22xmW8zsGr/b4ycz+5mZvW5m6/xuSxCY2Vgze8bMNsb+W/lKJq9T+AfPE8AxzrnjgD8D1/rcHr+tA84F/uB3Q/xiZmFgMXA6cDRwoZkd7W+rfLUEOM3vRgRIL3CVc24SMBO4MpP/fyj8A8Y597hzrjf2sAmo87M9fnPObXTObfa7HT6bAWxxzm11znUD9wFn+9wm3zjn/gC86Xc7gsI5t90592Ls+53ARmDMUK9T+AfbF4Df+t0I8d0YoD3hcQcZ/MctlcfM6oHjgReGOrfst3EMIjN7EjgkxVPXOeeWx865Du/j3NJits0Pmfw9KpylOKY52tKPme0N/Br4qnPunaHOV/j7wDk3Z7DnzWwhcBbwCVcBN2IM9fcQOoCxCY/rgNd8aosEkJlV4wX/Uufcskxeo7JPwJjZacA/A3Odc+/63R4JhFXABDMbb2Y1wHxghc9tkoAwMwPuBDY65/4909cp/IPnx8A+wBNmtsbMbvO7QX4ys3PMrAM4AXjEzH7nd5uKLTYB4MvA7/AG8+53zq33t1X+MbN7geeBo8ysw8y+6HebfDYLuBg4JZYZa8zsjKFepOUdREQqkHr+IiIVSOEvIlKBFP4iIhVI4S8iUoEU/iIiFUjhLyJSgRT+IiIV6H8Ak8En/idUJxAAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Vis\n", + "fig, ax = plt.subplots()\n", + "ax.plot(X, y, \".\", label=\"data\")\n", + "ax.plot(X, y_hat, \".\", label=\"pred\")\n", + "ax.set_title(f\"MSE: {loss_history[-1]:0.1f}\")\n", + "ax.legend();" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Numpy + Autograd Solution" + ] + }, + { + "cell_type": "code", + "execution_count": 277, + "metadata": {}, + "outputs": [], + "source": [ + "class MixedLinearRegression(object):\n", + " def __init__(self, n_features, n_targets=1, lr=0.01):\n", + " self.W = torch.zeros(n_targets, n_features, requires_grad=True) # note requires_grad=True!\n", + " self.lr = lr\n", + " \n", + " def update_weight(self):\n", + " # Gradients are given to us by autograd!\n", + " self.W.data = self.W.data - self.lr * self.W.grad.data\n", + "\n", + " def loss(self, y_hat, y):\n", + " return torch.mean(torch.pow(y_hat - y, 2))\n", + "\n", + " def predict(self, X):\n", + " y_hat = torch.matmul(X, self.W.t())\n", + " return y_hat.squeeze(-1)\n", + "\n", + " def train(self, X, y, epochs=50):\n", + " \"\"\"\n", + " X (n_examples x n_features):\n", + " y (n_examples): gold labels\n", + " \"\"\"\n", + " loss_history = []\n", + " for _ in range(epochs):\n", + " \n", + " # Our neural net is a Line function!\n", + " y_hat = self.predict(X)\n", + " \n", + " # Compute the loss using torch operations so they are saved in the gradient history.\n", + " loss = self.loss(y_hat, y)\n", + " \n", + " # Computes the gradient of loss with respect to all Variables with requires_grad=True.\n", + " loss.backward()\n", + " loss_history.append(loss.item())\n", + "\n", + " # Update a and b using gradient descent; a.data and b.data are Tensors.\n", + " self.update_weight()\n", + "\n", + " # Reset the accumulated gradients\n", + " self.W.grad.data.zero_()\n", + " \n", + " return loss_history" + ] + }, + { + "cell_type": "code", + "execution_count": 278, + "metadata": {}, + "outputs": [], + "source": [ + "X_pt = torch.from_numpy(X_np).float()\n", + "y_pt = torch.from_numpy(y).float()" + ] + }, + { + "cell_type": "code", + "execution_count": 279, + "metadata": {}, + "outputs": [], + "source": [ + "model = MixedLinearRegression(n_features=n_features, n_targets=1, lr=0.1)\n", + "loss_history = model.train(X_pt, y_pt, epochs=50)\n", + "with torch.no_grad():\n", + " y_hat = model.predict(X_pt)" + ] + }, + { + "cell_type": "code", + "execution_count": 280, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de3gd9X3n8fdHd1u2LGPJsrGELRNzMVCboBBayIUA4bJZoGnTmm0CSck6Scnm0ssuJO2W3Se0fbpJSdM0ZAlQ4GmAsCEUmkAeqHMhBAKRwWBzi21ssGxjyzbY8k3X7/5xRjDIx5asi4905vN6nvOcOb+ZOfMd8/A5o9/M/EYRgZmZZUNJoQswM7Mjx6FvZpYhDn0zswxx6JuZZYhD38wsQxz6ZmYZ4tA3K0KSfibpk4Wuw8Yfh76NG5LWSzq30HWYFTOHvtkoklRW6BrMDsWhbxOCpP8qaY2kHZLul3R00i5J10vaKmmnpGclnZzMu0jS85I6JG2U9OcH+e6PS/qlpH9KvuNFSeek5k+TdLOkzcn3fEVS6YB1r5e0A7g2z/eXSLpa0lpJ2yXdLemoZN48SSFpqaRNyTb+LLVupaSvJ/M2JdOVqfmXSFohaVfy/RekNj03qa1D0kOS6kb0H8GKgkPfxj1JHwD+FvgDYDbwCnBXMvuDwHuB44Ba4A+B7cm8m4FPRcRU4GTgJ4fYzLuBl4E64K+BH/QHM3Ab0AO8Azg12eYn86w7E7guz3d/DrgUeB9wNPA68M8DljkbWJB899Wpbq4vA2cAi4FFwOnAXyb/LqcDtwN/kez7e4H1qe/8L8AnkroqgLw/epYxEeGXX+PiRS6wzs3TfjPw96nPU4BuYB7wAeA35IKxZMB6rwKfAmoG2e7HgU2AUm1PAh8DGoBOYFJq3mXAT1PrvjrI978AnJP6PDupvyzZhwBOSM3/e+DmZHotcFFq3vnA+mT6/wLXH2SbPwP+MvX5T4AfF/q/sV+Ff/lI3yaCo8kd3QMQEbvJHc3PiYifAN8kd+S8RdKNkmqSRX8PuAh4RdLPJf32IbaxMSLSow++kmx3LlAObJb0hqQ3yIXtzNSyGwapfy5wb2r9F4Becj8o+b6jf9sH7PuAeU3kfhQO5rXU9F5yP5aWcQ59mwg2kQtOACRVAzOAjQAR8Y2IOA04iVw3z18k7b+OiEvIBfS/AXcfYhtzJCn1+ZhkuxvIHenXRURt8qqJiJNSyw42VO0G4MLU+rURURURG1PLNOXZ9gH7PmDeBuDYQbZt9jYOfRtvyiVVpV5lwB3AJyQtTk5i/g3wRESsl/QuSe+WVA7sAfYDvZIqJP2RpGkR0Q3sInd0fTAzgc9JKpf0EeBE4IGI2Aw8BHxNUk1yUvZYSe87jH36NnCdpLkAkuolXTJgmb+SNFnSSeT64b+XtN8J/GWyTh3wP4F/TebdnPy7nJPUNUfSCYdRl2WQQ9/GmweAfanXtRGxDPgr4B5gM7mj2yXJ8jXAd8idHH2FXLfPV5N5HwPWS9oFfBr46CG2+wS5E6nbyJ2M/f2I6D8hfDm5E6HPJ9v5Prl++aH6R+B+4CFJHcCvyJ38Tfs5sAZYBnw1Ih5K2r8CtALPAiuBp5I2IuJJcj8Q1wM7k++Yi9kh6O3dmGbZI+njwCcj4qwCbHsesA4oj4ieI719yx4f6ZuZZYhD38wsQ9y9Y2aWIT7SNzPLkHE/OFRdXV3Mmzev0GWYmU0oy5cv3xYR9QPbx33oz5s3j9bW1kKXYWY2oUh6JV+7u3fMzDLEoW9mliEOfTOzDHHom5lliEPfzCxDHPpmZhni0Dczy5CiDf3bHlvPvz+zafAFzcwypGhD/65fb+C+FRsHX9DMLEOKNvQbaip5bdf+QpdhZjauFG3oz6qpYsuuzkKXYWY2rhRt6M+sqWLb7k66e/sKXYqZ2bhRtKE/q6aKCNi220f7Zmb9ijb0G2oqAXhtp/v1zcz6FXHoVwG4X9/MLCUDoe8jfTOzfkUb+jOqKygrkUPfzCylaEO/pETMnOpr9c3M0oo29AEaplWx1X36ZmZvKu7Qn1rlI30zs5RBQ19Sk6SfSnpB0nOSPp+0HyXpYUmrk/fpqXWukbRG0kuSzk+1nyZpZTLvG5I0NruVM2talfv0zcxShnKk3wP8WUScCJwBXCVpIXA1sCwiFgDLks8k85YAJwEXAN+SVJp81w3AUmBB8rpgFPflADNrKunY38Perp6x3IyZ2YQxaOhHxOaIeCqZ7gBeAOYAlwC3JYvdBlyaTF8C3BURnRGxDlgDnC5pNlATEY9HRAC3p9YZE7N8rb6Z2dscVp++pHnAqcATQENEbIbcDwMwM1lsDrAhtVpb0jYnmR7Ynm87SyW1Smptb28/nBLfpv9afd+Va2aWM+TQlzQFuAf4QkTsOtSiedriEO0HNkbcGBEtEdFSX18/1BIP4Bu0zMzebkihL6mcXOB/NyJ+kDRvSbpsSN63Ju1tQFNq9UZgU9LemKd9zPSPv+PQNzPLGcrVOwJuBl6IiH9IzbofuCKZvgK4L9W+RFKlpGZyJ2yfTLqAOiSdkXzn5al1xsTUqnKqK0p92aaZWaJsCMucCXwMWClpRdL2JeDvgLslXQm8CnwEICKek3Q38Dy5K3+uiojeZL3PALcCk4AHk9eY8g1aZmZvGTT0I+JR8vfHA5xzkHWuA67L094KnHw4BY6Ub9AyM3tLUd+RC75By8wsrehDf2ZNJVt3dZK7NcDMLNuKPvRn1VTR1dvH63u7C12KmVnBFX3o+wYtM7O3ZCb0t3Q49M3MMhD6yQ1aPtI3Myv+0J851YOumZn1K/rQrygrYUZ1ha/VNzMjA6EPuX79rQ59M7OshL4fkG5mBhkJ/dxdue7TNzPLROjPnFrF9j2ddPf2FboUM7OCykToz5pWRQS0d/ho38yyLROh33+tvvv1zSzrMhL6uWv1fQWPmWVdpkLf4++YWdZlIvSPmlxBeanY4j59M8u4TIR+SYmYObXK4++YWeYN5cHot0jaKmlVqu17klYkr/X9z86VNE/SvtS8b6fWOU3SSklrJH0jeTj6EdNQU+mRNs0s84byYPRbgW8Ct/c3RMQf9k9L+hqwM7X82ohYnOd7bgCWAr8CHgAu4Ag8GL1fQ00Vv9nScaQ2Z2Y2Lg16pB8RjwA78s1Ljtb/ALjzUN8haTZQExGPR+65hbcDlx5+ucOXG3/Hffpmlm0j7dN/D7AlIlan2polPS3p55Lek7TNAdpSy7QlbXlJWiqpVVJre3v7CEvMaaipoqOzhz2dPaPyfWZmE9FIQ/8y3n6Uvxk4JiJOBf4UuENSDZCv//6gTyqPiBsjoiUiWurr60dYYs6sacnDVHytvpll2LBDX1IZ8GHge/1tEdEZEduT6eXAWuA4ckf2janVG4FNw932cDQkD1PxXblmlmUjOdI/F3gxIt7stpFUL6k0mZ4PLABejojNQIekM5LzAJcD941g24etYVr/Xbnu1zez7BrKJZt3Ao8Dx0tqk3RlMmsJB57AfS/wrKRngO
8Dn46I/pPAnwFuAtaQ+wvgiF25A6m7cn2kb2YZNuglmxFx2UHaP56n7R7gnoMs3wqcfJj1jZoplWVMqSxzn76ZZVom7sjtN7Om0qFvZpmWqdCfVeMnaJlZtmUq9BtqqjzSppllWuZCf2vHfnI3BZuZZU/GQr+S7t5gx56uQpdiZlYQmQr9Wcllm+7XN7OsylToz3wz9N2vb2bZlKnQnzXNoW9m2Zap0K+fkht0zXflmllWZSr0K8pKqJtS4T59M8usTIU+kHtWro/0zSyjMhf6s6Y59M0suzIX+rOnVdH2+r5Cl2FmVhCZC/3mump27uvmdd+gZWYZlLnQn19fDcDL2/YUuBIzsyMvc6HfXDcFgHUOfTPLoMyFfuP0SZSViHXbdhe6FDOzIy5zoV9eWsIxR032kb6ZZdJQnpF7i6Stklal2q6VtFHSiuR1UWreNZLWSHpJ0vmp9tMkrUzmfSN5QHpBNNdV83K7Q9/MsmcoR/q3Ahfkab8+IhYnrwcAJC0k98D0k5J1viWpNFn+BmApsCB55fvOI6K5rpr12/fQ1+dx9c0sWwYN/Yh4BNgxxO+7BLgrIjojYh2wBjhd0mygJiIej9wTTG4HLh1u0SPVXF/N/u4+j8FjZpkzkj79z0p6Nun+mZ60zQE2pJZpS9rmJNMD2/OStFRSq6TW9vb2EZSYX3Nd7rJN9+ubWdYMN/RvAI4FFgObga8l7fn66eMQ7XlFxI0R0RIRLfX19cMs8eDmJ5dt+lp9M8uaYYV+RGyJiN6I6AO+A5yezGoDmlKLNgKbkvbGPO0F0VBTyaTyUtb5ZK6ZZcywQj/po+/3u0D/lT33A0skVUpqJnfC9smI2Ax0SDojuWrncuC+EdQ9IpJorqv2tfpmljllgy0g6U7g/UCdpDbgr4H3S1pMrotmPfApgIh4TtLdwPNAD3BVRPQmX/UZclcCTQIeTF4F01xfzXMbdxayBDOzI27Q0I+Iy/I033yI5a8DrsvT3gqcfFjVjaH5ddX8eNVrdPX0UVGWuXvUzCyjMpt2zXXV9PYFG17fW+hSzMyOmEyHPuCTuWaWKQ59X7ZpZhmS2dCvnVzBUdUVvlbfzDIls6EP+LJNM8sch76P9M0sQzIf+lt2dbKns6fQpZiZHRGZDv35PplrZhmT6dBv9kPSzSxjMh3682b4Wn0zy5ZMh35VeSlzaif5Ch4zy4xMhz7A/HpfwWNm2ZH50G+uq+blbXvIPcXRzKy4OfTrqunY38P2PV2FLsXMbMw59H3ZppllSOZDv/95ub6Cx8yyIPOhP2f6JMpL5Wv1zSwTMh/6pSVi7gwPvGZm2TBo6Eu6RdJWSatSbf9H0ouSnpV0r6TapH2epH2SViSvb6fWOU3SSklrJH0jeUD6uOCB18wsK4ZypH8rcMGAtoeBkyPit4DfANek5q2NiMXJ69Op9huApcCC5DXwOwtmfl0167fvpbfPl22aWXEbNPQj4hFgx4C2hyKif2jKXwGNh/oOSbOBmoh4PHIXxN8OXDq8kkdfc101XT19bHpjX6FLMTMbU6PRp//HwIOpz82Snpb0c0nvSdrmAG2pZdqStrwkLZXUKqm1vb19FEo8NF+2aWZZMaLQl/RloAf4btK0GTgmIk4F/hS4Q1INkK///qB9KRFxY0S0RERLfX39SEockv7RNh36Zlbsyoa7oqQrgA8B5yRdNkREJ9CZTC+XtBY4jtyRfboLqBHYNNxtj7b6KZVMqSxz6JtZ0RvWkb6kC4D/AVwcEXtT7fWSSpPp+eRO2L4cEZuBDklnJFftXA7cN+LqR4mkN8fgMTMrZkO5ZPNO4HHgeEltkq4EvglMBR4ecGnme4FnJT0DfB/4dET0nwT+DHATsAZYy9vPAxScH5JuZlkwaPdORFyWp/nmgyx7D3DPQea1AicfVnVHUHNdNf/+7Cb2d/dSVV5a6HLMzMZE5u/I7Xf8rKlEwEuvdRS6FDOzMePQTyxqqgXgmbY3ClyJmdnYcegnjp5WRf3USla86tA3s+Ll0E9IYlFjLSs2OPTNrHg59FNOPaaWl7ftYefe7kKXYmY2Jhz6KYsa3a9vZsXNoZ/yW03TkHAXj5kVLYd+Sk1VOcfWT+EZh76ZFSmH/gD9J3OT4YTMzIqKQ3+AxcfUsn1PF22ve2x9Mys+Dv0BTk1u0nK/vpkVI4f+AMfPmkplWYlD38yKkkN/gPLSEk6eM82hb2ZFyaGfx+KmWlZt3El3b1+hSzEzG1UO/TwWNdXS2dPnETfNrOg49PPoP5n7tLt4zKzIOPTzaJw+iaOqK3yTlpkVHYd+HpJY3OQRN82s+AzlGbm3SNoqaVWq7ShJD0tanbxPT827RtIaSS9JOj/Vfpqklcm8byQPSB+3FjfVsrZ9N7v2e8RNMyseQznSvxW4YEDb1cCyiFgALEs+I2khsAQ4KVnnW5L6Hzh7A7AUWJC8Bn7nuLKoqZYIWNm2s9ClmJmNmkFDPyIeAXYMaL4EuC2Zvg24NNV+V0R0RsQ6YA1wuqTZQE1EPB65QW1uT60zLi1u9J25ZlZ8htun3xARmwGS95lJ+xxgQ2q5tqRtTjI9sH3cmja5nPl11TztxyeaWREZ7RO5+frp4xDt+b9EWiqpVVJre3v7qBV3uBY1ecRNMysuww39LUmXDcn71qS9DWhKLdcIbEraG/O05xURN0ZES0S01NfXD7PEkVvcVMu23Z1s2rm/YDWYmY2m4Yb+/cAVyfQVwH2p9iWSKiU1kzth+2TSBdQh6Yzkqp3LU+uMW4v6R9x0F4+ZFYmhXLJ5J/A4cLykNklXAn8HnCdpNXBe8pmIeA64G3ge+DFwVUT0Jl/1GeAmcid31wIPjvK+jLoTZ0+lorTEz8w1s6JRNtgCEXHZQWadc5DlrwOuy9PeCpx8WNUVWGVZKQuPrvGRvpkVDd+RO4jFTbWs3LiTHo+4aWZFwKE/iMVNtezr7uWlLR5x08wmPof+IFrm5UaYeGzN9gJXYmY2cg79QTROn8wJs6by8AtbCl2KmdmIOfSH4LyFDbSu38GOPV2FLsXMbEQc+kNw7okN9AX89MWtgy9sZjaOOfSH4JQ502ioqeTh593FY2YTm0N/CEpKxDknNvDI6nb2d/cOvoKZ2Tjl0B+i8xY2sLerl8df9lU8ZjZxOfSH6Lfnz2ByRam7eMxsQnPoD1FVeSnvO66eZS9soa/PQy2b2cTk0D8M557YwJZdnazc6EcomtnE5NA/DGefMJMSwX/4Ri0zm6Ac+ofhqOoKWuYd5X59M5uwHPqH6bwTG3jxtQ427Nhb6FLMzA6bQ/8wnbuwAXAXj5lNTA79w9RcV807Zk5xF4+ZTUgO/WE4b2EDT6zbwc693YUuxczssDj0h+HcExvo7Qt+9hsPwGZmE8uwQ1/S8ZJWpF67JH1B0rWSNqbaL0qtc42kNZJeknT+6OzCkXdqUy11UyrcxWNmE86gD0Y/mIh4CVgMIKkU2AjcC3wCuD4ivppeXtJCYAlwEnA08B+SjouICTeCWUmJOOeEBh5YuZmunj4qyvwHk5lNDKOVVucAayPil
UMscwlwV0R0RsQ6YA1w+iht/4g7b2EDHZ09PLHOA7CZ2cQxWqG/BLgz9fmzkp6VdIuk6UnbHGBDapm2pO0AkpZKapXU2t7ePkoljq4z31HH5IpS/u3pTYUuxcxsyEYc+pIqgIuB/5c03QAcS67rZzPwtf5F86yed+SyiLgxIloioqW+vn6kJY6JSRWl/P5pjfz7M5vY2rG/0OWYmQ3JaBzpXwg8FRFbACJiS0T0RkQf8B3e6sJpA5pS6zUCE/ow+RNnNtPV28e//urVQpdiZjYkoxH6l5Hq2pE0OzXvd4FVyfT9wBJJlZKagQXAk6Ow/YJprqvmnBNm8t1fveInapnZhDCi0Jc0GTgP+EGq+e8lrZT0LHA28EWAiHgOuBt4HvgxcNVEvHJnoCvPamb7ni7uXzGh/2gxs4xQxPh+IEhLS0u0trYWuoyDiggu/MdfAPDg59+DlO/UhZnZkSVpeUS0DGz3BeYjJIk/PquZF1/r4LG1vnzTzMY3h/4ouHjR0dRNqeDmR9cVuhQzs0Ny6I+CqvJS/ujdc/nJi1tZ27670OWYmR2UQ3+UfPSMuVSUlnDrL9cXuhQzs4Ny6I+S+qmVXLL4aL6/vI039nYVuhwzs7wc+qPoE2c2s6+7lzuf3DD4wmZmBeDQH0ULj67hd46dwe2Pr6e7t6/Q5ZiZHcChP8quPKuZzTv38+Cq1wpdipnZARz6o+zs42fSXFfNPy1bTVePj/bNbHxx6I+ykhLx5YtOZPXW3XznFy8Xuhwzs7dx6I+Bcxc2cNEps/jHZatZt21PocsxM3uTQ3+MXPufT6KyrIQv/WAl4318IzPLDof+GJlZU8XVF57A4y9v556nNha6HDMzwKE/pi571zG0zJ3OV370PNt3dxa6HDMzh/5YKikRf/vhU9jT2cNXfvRCocsxM3Poj7UFDVP5zPuO5d6nN/KL1ePzIe9mlh0O/SPgT85+B/PrqvnyvavY1zXhHxZmZhOYQ/8IqCov5W8+fAqv7tjL15f9ptDlmFmGjfQZueuT5+GukNSatB0l6WFJq5P36anlr5G0RtJLks4fafETyRnzZ7DkXU3c+MjL/PBZP0/XzApjNI70z46IxalnMV4NLIuIBcCy5DOSFgJLgJOAC4BvSSodhe1PGNdefBItc6fzxe+t4JdrthW6HDPLoLHo3rkEuC2Zvg24NNV+V0R0RsQ6YA1w+hhsf9yqKi/lpsvfxfy6KSy9vZVVG3cWuiQzy5iRhn4AD0laLmlp0tYQEZsBkveZSfscID3QfFvSdgBJSyW1Smptby+uK16mTS7n9itPp3ZyBR//lydZ72EazOwIGmnonxkR7wQuBK6S9N5DLKs8bXnHJ4iIGyOiJSJa6uvrR1ji+NNQU8XtV55Ob19w+S1PsrVjf6FLMrOMGFHoR8Sm5H0rcC+57potkmYDJO9bk8XbgKbU6o1AZs9oHls/hX/5xOm0d3RyxS2/Ztf+7kKXZGYZMOzQl1QtaWr/NPBBYBVwP3BFstgVwH3J9P3AEkmVkpqBBcCTw91+MVjcVMsNH30nq7d0sPT2VnZ39hS6JDMrciM50m8AHpX0DLnw/lFE/Bj4O+A8SauB85LPRMRzwN3A88CPgasiIvN3Kr3/+Jl89SOLeHLdDi7+5qO89FpHoUsysyKm8T7sb0tLS7S2tha6jDH3q5e389k7nmZPZw9/8+GT+d1TGwtdkplNYJKWpy6lf5PvyB0nzpg/gwc+dxanNE7ji997hi/du5L93Zn/Q8jMRplDfxyZWVPFHZ98N59633zueOJVPvLtx9mwY2+hyzKzIuLQH2fKSku45sITufFjp7F++x4+9E+P8t0nXqG71w9ZN7ORc+iPUx88aRY//G9ncVzDFL587yo+eP0j/OjZzX70opmNiEN/HJs7o5q7P/Xb3HR5C+Wl4qo7nuLib/6SR1d73B4zGx6H/jgniXMXNvDg59/LVz+yiB17uvjozU/w0Zue4LE12+jr85G/mQ2dL9mcYPZ39/LdJ17lmz9Zzet7u5lTO4nfe+ccPvzORubVVRe6PDMbJw52yaZDf4La19XLQ8+/xveXt/Homm1EwLvmTef33tnIhafMZtqk8kKXaGYF5NAvYpt37uPepzdyz/I21rbvoURwSmMtv3PsDM48to7T5k5nUkWmHl1glnkO/QyICJ5p28lPXtzKY2u2sWLDG/T0BRWlJZx6TC3vnj+DhbNrOHH2VJqmT6akJN/Ap2ZWDBz6GbSns4dfr9/BY2u389jabTy3aRf9/7mrK0o5ftZUTphdwwmzptJ01GSapk+mcfokqsr9V4HZROfQN/Z19fKbLR28+NouXtjcwQubd/Hiax3s3Pf2YZ3rplTSOH0SjdMnMXNqFTOmVFA/pZIZUyqYMaWSuikV1E6uoLqiFMl/LZiNRwcL/bJCFGOFMamilEVNtSxqqn2zLSLY2tFJ2+t7aXt9Hxt25N7bXt/Hqo07ae/Yyp6u/GMAlQhqJpUztaqMmqrc+5TKciZXlDK5opRJyfvkijImlZdSUVZCZVlJ8l5KZfK5rLSEslJRXlJCeZkoKymhvFSUluSmS0v05qsseZegVKJEcjeV2WFw6GecJBpqqmioqeK0ufmX2dfVy7bdnWzf08X23Z1s293Jzn3d7NrXw6793XTs72HXvm527e+m7fW97O/uZW9XL/u6etnb3UvvEbiXoLRElCi3PyUCkXsvkSB5TybfXAbeasv9W+TWe3vbWz8oUu715udkqf62g/30vO073jbjIMsPaY8Pvo3xbuJUWng//NxZVJaNbnerQ98GNamiNNfnf9Tkw143Iujq7WN/Vx+dvb10dvfR2dNHV08fnT29dPb00dMbdPcl7719dPfmpnv7gt4IevqC3t4+egN6evvoC+iLoC+Z39cX9AX0RhCR22ZfMt2/LPBmW/DWvP4ndubWe2te/5y3puNtD/eM1P6lPx+4/weuk17vgOWH+g874pUKIyZSseOAxuAn0qFvY0pS0pVTCvjeAbNC8zAMZmYZ4tA3M8uQkTwYvUnSTyW9IOk5SZ9P2q+VtFHSiuR1UWqdayStkfSSpPNHYwfMzGzoRtKn3wP8WUQ8JWkqsFzSw8m86yPiq+mFJS0ElgAnAUcD/yHpOD8c3czsyBn2kX5EbI6Ip5LpDuAFYM4hVrkEuCsiOiNiHbAGOH242zczs8M3Kn36kuYBpwJPJE2flfSspFskTU/a5gAbUqu1cZAfCUlLJbVKam1vbx+NEs3MjFEIfUlTgHuAL0TELuAG4FhgMbAZ+Fr/onlWz3vRbkTcGBEtEdFSX18/0hLNzCwxotCXVE4u8L8bET8AiIgtEdEbEX3Ad3irC6cNaEqt3ghsGsn2zczs8Ax7wDXl7vu+DdgREV9Itc+OiM3J9BeBd0fEEkknAXeQ+xE4GlgGLBjsRK6kduCVYRUJdUAWHyjr/c4W73e2DHW/50bEAV0lI7l650zgY8BKSSuSti8Bl0laTK7rZj3wKYCIeE7S3cDz5K78uWooV+7kK3qoJLXmG2Wu2Hm/s8X7nS0j3e9h
h35EPEr+fvoHDrHOdcB1w92mmZmNjO/INTPLkGIP/RsLXUCBeL+zxfudLSPa73H/5CwzMxs9xX6kb2ZmKQ59M7MMKcrQl3RBMpLnGklXF7qesZQMdbFV0qpU21GSHpa0OnmffqjvmIgOMcprUe+7pCpJT0p6Jtnv/5W0F/V+A0gqlfS0pB8mn4t+nwEkrZe0Mhm1uDVpG/a+F13oSyoF/hm4EFhI7r6BhYWtakzdClwwoO1qYFlELCB3E1wx/vD1j/J6InAGcFXy37nY970T+EBELCI31MkFks6g+Pcb4PPkBnbsl4V97nd2RCxOXZ8/7H0vutAnd8fvmoh4OSK6gLvIjfBZlCLiEWDHgOZLyN0tTfJ+6REt6gg4xCivRb3vkbM7+VievIIi329JjcB/Am5KNRf1Pg9i2PtejKE/5NE8i1hD/1AYyfvMAtczpgaM8lr0+1vcQyoAAAGkSURBVJ50c6wAtgIPR0QW9vvrwH8H+lJtxb7P/QJ4SNJySUuTtmHvezE+GH3Io3naxDdwlNfckFDFLRm+ZLGkWuBeSScXuqaxJOlDwNaIWC7p/YWupwDOjIhNkmYCD0t6cSRfVoxH+h7NE7ZImg25AfDIHREWnXyjvJKRfQeIiDeAn5E7p1PM+30mcLGk9eS6az8g6V8p7n1+U0RsSt63AveS68Ie9r4XY+j/GlggqVlSBblHNN5f4JqOtPuBK5LpK4D7CljLmEhGeb0ZeCEi/iE1q6j3XVJ9coSPpEnAucCLFPF+R8Q1EdEYEfPI/f/8k4j4KEW8z/0kVSePo0VSNfBBYBUj2PeivCM3eRj714FS4JZkoLeiJOlO4P3khlvdAvw18G/A3cAxwKvARyJi4MneCU3SWcAvgJW81c/7JXL9+kW775J+i9yJu1JyB213R8T/ljSDIt7vfkn3zp9HxIeysM+S5pM7uodcd/wdEXHdSPa9KEPfzMzyK8buHTMzOwiHvplZhjj0zcwyxKFvZpYhDn0zswxx6JuZZYhD38wsQ/4/XHpljCUeAlgAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(loss_history)\n", + "plt.title('Loss per epoch');" + ] + }, + { + "cell_type": "code", + "execution_count": 281, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfXxcVb3v8c9vJkkr8hQKSGlKU6TQUqClSWuxVAGL8mQRCoeC0lbl4SDeq0de3AMHBUU5elTOBY/1cBC0KAUOaKEVEHn0WoGQJrSVPkopqYlUKSFAe4EmmVnnjz2TTiYzyWSe9p6Z7/v16iuZPXsyK1G+e81vrb2WOecQEZHKEvK7ASIiUnwKfxGRCqTwFxGpQAp/EZEKpPAXEalACn8RkQqk8BcRqUAKfylpZtZmZt1mdmDS8TVm5sysPva4zsx+bWZvmNnbZvaSmS2KPVcfO3dX0r8LMmzD982s3czeMbNtZnZd0vO3m9lmM4vG3zPhuduS3nO3me0c5L3CZvYdM3vNzHaa2Woz2z+TdookUvhLOXgVuDD+wMyOBT6QdM4vgXZgHDAKWAD8Pemc/Z1zeyf8++8M3/9OYKJzbl/go8BFZnZuwvNrgS8BLya/0Dn3j4nvCdwLPDDIe30r9h4nAPsCFwPvZ9hOkT5VfjdAJA9+iRfm/xF7vBD4BfCdhHOmA//knPv/scer8/XmzrnNSYeiwBEJzy8GMLNBQ9rMPgjMA85K83wt8FVginNuW+zwuiybLRVOPX8pB03AvmY2yczCwAXA3SnOWWxm883ssOH8cDO7yMz+NMQ515jZLqAD+CBwz3DeI2YesAP4Q5rnjwV6gfPM7G9m9mczuzKL9xFR+EvZiPf+TwU2AX9Nev58YCXwDeDV2JjA9KRz3jCztxL+TQJwzt3jnDtusDd3zn0P2AeYFmvL21n8DguBX7j0C27VAfsBRwLjgfOAb5rZqVm8l1Q4hb+Ui18CFwGL8Eo+/Tjnupxz1zjnJgMfAtYAD5mZJZx2oHNu/4R/G4fTAOdZDbyHV5vPmJmNBT6equ0J3ot9vdE5955z7k/AfcAZw3kvEVD4S5mI1cBfxQvCZUOc+wbwQ+BQ4IACNKcK+PAwX7MAeM45t3WQc+KlJy3FKzlT+Es5+SJwSsKgbh8z+zczO8bMqsxsH+AKYItzrjOXNzSzkJldbma15pkBXAk8lXBOjZmNBAyoNrORZpb8394CYMlg7+WcewWvdHWdmY2IlaUuAB7O5XeQyqTwl7LhnHvFOdeS5um9gAeBt4CteFM+5yad81bSnPuvAZjZZ81s/SBvfQ7wCrATb6D5P9gz8wjgcbySzUeB22Pffyz+pJmdgFfPHzDF08x+a2b/knDowljbO4FHgG84555Kfp3IUEybuYiIVB71/EVEKpDCX0SkAin8RUQqkMJfRKQClczaPgceeKCrr6/3uxkiIiWltbX1DefcQcnHSyb86+vraWlJN4tPRERSMbNtqY6r7CMiUoEU/iIiFUjhLyJSgUqm5p9KT08PHR0dvP9+eW9kNHLkSOrq6qiurva7KSJSJko6/Ds6Othnn32or6+n/8q85cM5R2dnJx0dHYwfP97v5ohImSjpss/777/PqFGjyjb4AcyMUaNGlf2nGxEprpIOf6Csgz+uEn5HkVLTuq2Lxc9soXVbl99NyUpJl31ERPzQuq2Lz97RRHdvlJqqEEsvmUnDuFq/mzUsJd/zD5JvfvOb/PCHP0z7/EMPPcSGDRuK2CIRKYSmrZ1090aJOujpjdK0Nac9gXyh8C8ihb9IeZh5+ChqqkKEDaqrQsw8fJTfTRq2igv/fNfpbrrpJo466ijmzJnD5s2bAfjpT3/K9OnTmTJlCvPmzePdd9/lueeeY8WKFVx99dVMnTqVV155JeV5IhJ8DeNqWXrJTL72yaNKsuQDFRb+8TrdzY9v5rN3NOV8AWhtbeW+++5j9erVLFu2jFWrVgFw7rnnsmrVKtauXcukSZO48847+ehHP8rcuXP5wQ9+wJo1a/jwhz+c8jwRKQ0N42q58uQjSjL4ocIGfFPV6XL5H27lypWcc8457LXXXgDMnettCbtu3Tq+/vWv89Zbb7Fr1y4+9alPpXx9pueJiORbRfX8C1GnSzUNc9GiRfz4xz/mpZde4oYbbkg7Rz/T80RE8q2iwj/fdbqPfexjPPjgg7z33nvs3LmT3/zmNwDs3LmT0aNH09PTw9KlS/vO32effdi5c2ff43TniYgUWkWVfcC7AOSrRjdt2jQuuOACpk6dyrhx45g9ezYA3/72t/nIRz7CuHHjOPbYY/sCf/78+Vx66aX86Ec/4le/+lXa80RECs2cc363ISONjY0ueTOXjRs3MmnSJJ9aVFyV9LuKSP6YWatzrjH5eEWVfURExJOX8Dezn5nZ62a2LuHYAWb2hJm9HPtam/DctWa2xcw2m5mmuIiIFFm+ev5LgNOSjl0DPOWcmwA8FXuMmR0NzAcmx17zEzML56kdIiKSgbyEv3PuD8CbSYfPBu6KfX8X8JmE4/c553Y7514FtgAz8tEOERHJTCFr/h9yzm0HiH09OHZ8DNCecF5H7JiIiBSJHwO+qRanTznlyMwuM7MWM2vZsWNHgZslIlI5Chn+fzez0QCxr6/HjncAYxPOqwNeS/UDnHO3O+canXONBx10UAGbGhx77723300QkQpQyPBfASyMfb8QWJ5wfL6ZjTCz8cAEoLmA7fBdJBLxuwkiIv3ka6rnvcDzwFFm1mFmXwS+B5xqZi8Dp8Ye45xbD9wPbAAeA650zhUvHdubYeXN3tc8aGtrY+LEiSxcuJDjjjuO8847j3fffZf6+npuvPFGTjzxRB544AFeeeUVTjvtNBoaGpg9ezabNm0C4NVXX+WEE05g+vTpfOMb38hLm0REhpKX5R2ccxemeeoTac6/CbgpH+89LO3NcNdciHRDuAYWroCxuU802rx5M3feeSezZs3iC1/4Aj/5yU8AGDlyJH/84x8B+MQnPsFtt93GhAkTeOGFF/jSl77E008/zVe+8hWuuOIKFixYwOLFi3Nui4hIJirrDt+2lV7wu4j3tW1lXn7s2LFjmTVrFgCf+9zn+gL/ggsuAGDXrl0899xznH/++UydOpXLL7+c7du3A/Dss89y4YXetfPiiy/OS3tERIZSWQu71c/2evzxnn/97Lz82ORlneOPP/jBDwIQjUbZf//9WbNm
TUavF5HS07qti6atncw8fFRJbPBSWT3/sTO8Us8p1+Wt5APwl7/8heeffx6Ae++9lxNPPLHf8/vuuy/jx4/ngQceAMA5x9q1awGYNWsW9913H4CWdRYpUfneJbAYKiv8wQv82VflLfgBJk2axF133cVxxx3Hm2++yRVXXDHgnKVLl3LnnXcyZcoUJk+ezPLl3uSnW2+9lcWLFzN9+nTefvvtvLVJRIon1S6BeZHnCSqJKqvsUyChUIjbbrut37G2trZ+j8ePH89jjz024LXjx4/v+9QAcM011xSkjSJSOPFdAnt6o3nbJbBQE1TiFP4iIjmK7xLYtLWT2r1q+nr+OdX+U01QUfgHR319PevWrRv6RBEpKcMdwI2f89k7mujujVJTFcptu9gCTVCJK/nwd86V/WyZUtltTaRcxAdwhxviqWr/WYd/fIJK20ov+PPY64cSD/+RI0fS2dnJqFGjyvYC4Jyjs7OTkSNH+t0UkYqRbYjnvfY/dkbeQz+upMO/rq6Ojo4Oyn3Fz5EjR1JXV+d3M0QqRrYhnlj7D/p8/5LewF1EpFCCcNNWPtqQbgP3ku75i4gUSsO4Wl977tmOO2Sq8m7yEhEpAQW7cSxG4S8iEkDxcYewkb8bxxKo7CMikk/tzbD2HsBgyoVZz9Yp9OCxwl9EJB/iof/i3RDt8Y6tXgqLHs7pAlCocQeFv4hIrlqWwKNXQbS3//ECLMuQL6r5i4jkor05dfBDQZZlyBf1/EXEd0GYUz8siXV9HLjonucsDEedDnsfnFPNv9AU/iLiq0LPZ8+79maiPz8Ti3YDYKFqCFV5PX8LwRk3Q+Mif9uYAYW/iPgqr4uhFcFf1zzOIZEeQrHlxFy0F2v8POxXV5AF2ApF4S8ivhruOjp+l4iejxzNpwkTcl6NPxKqpirA5Z10FP4i4qvhzGcvaomovTnlcsrjjz+Zha3X82n3/wiFjOPPuIKJJRb8oPAXkQAYaj57vLf/2lvvFadENMgWig3jarn6kgU0bT2TmYePYmKAS1SDUfiLSKAl9varQkZVOEQkkse9clMZYgtFvxd9yweFv4gU3XDq9okDwpGo44IZYxmz/wcKW/Mf5haKfo9DZEPhLyJFNdy6ffKA8LxpdYUP2GFsoVhyU1VjFP4iUlTDndrp2+5YGW6hWGpTVeMU/iJSVNlskRjkGnve9+0tEm3jKCJFl1wjz7Rmnu48v2vufr//YLSNo4gERmJPPtOaebrzglBzD/Ink3S0qqeI+CrT7QrTnVfo7Q7LVcF7/mbWBuwEIkCvc67RzA4A/huoB9qAf3DOdRW6LSISPJnWzNOdV6o1d78VvOYfC/9G59wbCce+D7zpnPuemV0D1Drn/nmwn6Oav0j5KvWaf5AFreZ/NnBS7Pu7gN8Dg4a/iJSvTGvm6c5rCL1MQ9VKCM0GSm+dHT8UI/wd8LiZOeC/nHO3Ax9yzm0HcM5tN7ODU73QzC4DLgM47LDDitBUESkp8U1VVt/jraeftA6PpFeM8J/lnHstFvBPmNmmTF8Yu1DcDl7Zp1ANFJES1LdvbgSvj0lg9swthTJUwcPfOfda7OvrZvYg3meyv5vZ6FivfzTweqHbISJlIN7T37UDNv/WW3itjwViz9wgTD3NREHD38w+CISccztj338SuBFYASwEvhf7uryQ7RCREhcP/RfvhmjPwOctDA0LA7Fnbqks91Donv+HgAfNLP5e9zjnHjOzVcD9ZvZF4C/A+QVuh4hkIJDlilTlnT4GoXCg9s0tlamnWt5BRIDilCuGfXFpb4afn+4N5iYLVcO0z8GUi3zv7ScL0kU0aFM9RSQAEkOq0OWKrC4ubSvBRfc8tjAcdTrsfXAgSjzplMJyDwp/kQqVHMbXnzW5oOWKrC4u9bMhPAIiu8FCgSrvlDqFv0iFSg7jrne7C7pu/qC18CdugI0rYNJcOPVbe44PY1MVGR6Fv0iFShXGhSxXpN2U5YkbcM/e4n3/7C0YDLwAKPTzTgO+IhUsXvOv3auGrne789bjH3LAs725rzf//v2XMuKdNszAOdi9bz0jr1qbcxvEowFfkQII0qyObMTbnM9ZPoMO7KZYjuHV2pOY+E4b8X7ouv0+zoCkkrxT+ItkqVTu5BzKshc72N0TxZGfWT5pB3bbm+GuudD7PonLMex72DH89G9VnGrNPOFm0JBY8pGCUfiLZKlU7uQcTOu2Lh5oae+7dSoczn2WT9qB3baV3to7fe/mLccwZuonaTjuSh4t4U9QpUjhL5KlUrmTczBNWzvpjXphbMCkQ/bJ+Wc2jKvlobnVdG14mtqjT2FiPMzrZ3tr70S6vbtyj/9c31z9BlDoF5kGfEVyUOo1/8TSVdR5F4AR1TmWsOLlnUj3wCWWEwZ6NYOnONIN+GoPX5EcNIyr5cqTjyjJ4Ic90y9nHXEgBv3q/llpb4bffxd6d3srbsaXWI4bOwNmX6XgDwCFv0iFaxhXy1fnHMmI6hBhI/sSVrzHv/X3QBQIBWKJZUlNNX+RChcvXS06oZ7129/h9GNGZ77oWmIJJz6g66LeUgyHnwQnXatefkAp/EUqWKqa/6q2NznqkH3SXwDam2HtvbD6bm+Z5XhdP3FAN1yj4A84hb9IBUucrgoMPde/b239hCWW43X92VcVdR2eUh9s95vCX6SE5RqA8emq8QtAiEFq/u3NA4M/aevE1ugEmnoPYGZ0FA3Z/UoZKZcb7Pyk8BcpUfkIwMTF1oZc3yfV2voNC/o2UylmIJfDDXZ+U/iLlKjEAOzuiXLLk3/mq3OOzOoCkNFrhlhbv1CBnOrTTTncYOc3hb9Iiardq4aQGc45osCzW95gVdubhetxx9bW/+uax3k+cjTjDzq5X2mnEIGc7tNE2uWhJWMKf5ES1LqtixsfXk/UOSx2d1bOPe4nboCX7ofa8TDnmykHbFujE/hss9fDr2lt6nehKUQgD/ZpohS2Sgwyhb9IARR6JkpiKIYMwiHvE0BWPe6WJfD7f4Vdf/cev/Ma/Ow0+MJjAy4AQ5V28h3IKu8UjsJfJM/SlSpyvSAkvj45FK8/a3J2m7H8+lKvt5/MRbwB3qTwL3YYD/VpQtM9s6fwF8mzVL1jyG3DlFQXlJxKLO3N8OytsOnh1M9bOO2yDPOm1eFiX4sRuOk+TWi6Z24U/iJ5lqp3PJyZMKl6s6leP9wF5Vq3dfHq6mf4+HtPcNCWX3uzdlI55Fg4898H9PqTw3betLqM37sQNN0zNwp/kTxLV6rIpFySrjeba7mldVsXD93xHa4P/ZwQEZx5Szn0kyb044IWthoPyI3CX6QAkksVyRcEgMXPbBlQskkXsFnPpGlZAhuX43aP5frQ3VQR8TZKB8AgVAWjj4PjF/Sbs59K0MJW0z1zo/AXKZJ4iA9Wqx4sYIc1kyZW03exmn4DXuCbgXPgLIw1LmTTh87kqV31zDxo6OUYghi2mu6ZPYW/SJGlGxCOh2omnxAGFVtX3/W+B25P4GMhogChEKEzb6b1oLNjF6HNGQ+YKmzLh8JfpMiSe/e1e9UM+CRw5clHZD+
bJbaufnxnrvhOrS/WXUzjUeP6VtxsemZLoGr4UlwKf5EcZDPPPLl8kq7On9EAa3szrL0Hdu2AvQ/2NkSPravvIt30OFgfHccyTuEzp34dEl4ftBq+FJfCXypetjcK5TLPPLl8kiqEhwznliXwyD/1X2lz9VJY9DAsXIG1rWTryCk8t6uez6T43YJYw5fiUfhLRcslwIeq3Wf7SSCjtXLam+GRr/UPfui/scrYGUwEJg7x3gr9yuRb+JvZacCtQBi4wzn3Pb/aIpUrl7nrmdTus/0kMNTx5LX1Y2V9bIgN07UcgsT5Ev5mFgYWA6cCHcAqM1vhnNvgR3ukcuVS9860dp+z+EbpHxgF73V64d5X198NDiLAM66RsWdey8Q0N2kN9SlHF4bK4lfPfwawxTm3FcDM7gPOBhT+UlS51r0zqd3npG/P3Ahe/z4EVSO8vXIXPcy6R2/jT+1v8evIbNZyJF/bVZ+2zDPYxUnr5FQev8J/DNCe8LgD+EjySWZ2GXAZwGGHHVaclknFyVfdO+8DqCn3zI32q+t3nzaBb9/RRA9DX3AG+5QTtKUbpPD8Cv8By4qwp2y554BztwO3AzQ2Ng54XiRo8nIhiZd53m6HaNKALqF+G6YP54Iz2Lma9ll5/Ar/DmBswuM64DWf2iJScEPW0xPr+o9d4/XuQ1UQroJIL4RCcMKXYeS+fTdpxQ3ngjPYwLKmfVYWv8J/FTDBzMYDfwXmAxf51BaRghqynt5X1496Ie+i3r8o0LAQ9qsbEPj5aFOqqaUK/crhS/g753rN7MvA7/Cmev7MObfej7aIFFrT1k5290RxQHdPUj09ua4fdRAKA+aVd6ZcmNfQBw3uise3ef7OuUeBR/16fykfQZ+iWLtXTd+AVjT2eE9dv6P/jVqhMJxx854pnXkOftDgrnh0h6+UtFLoxXa9203I4B/sKU4PN+NWHQdvLYvV9cMQqoZoD1jIC/6kdfXzfXHT4K6Awl9KXCn0YmcePoqfh7/Lx8IveQd2vIQzw3AQhR1HXsCW7lpqjz6FiY1z+r0224vbYBeMfA3uBv0TlwxO4S8lrRR6sQ1/vgVX9VK/tfUdhlmIaKia/7VhIs29R1CzpYelB3dltLPXYDK5YOQ6uFsKn7hkcAp/KWmBnaL4xA2wcQVMmut9JWFTFeDvx17G6IMPZlnneJpfGJE23LO5uBXj01ApfOKSwSn8peQVY4pixiWO+Gqbf4uVeJ69BQ5t6NtYBYO3D/0Yo+f9GwDjt3VR09qUNtyzubgV49NQKXziksGZc6Vx42xjY6NraWnxuxlSgTIucbQ3w5IzIdLt7ZdLbN/cAw7f8wlg0lw49VsDfn6+P7nEf2btXjV0vdtdkE9FqvmXBjNrdc41Jh9Xz19kCBmXONpWQqQHiAV/rF+1fcwnGX3qtwaEflwhPrnEf14h6/K6Kay0hfxugEjQxUscYWNgiaNlCfzyHO9r/WwIV/ftm+uAByOzWHbApb60O91mMyKgnr9IWolljevPmsxv123n9GNG7+nttiyBh7/iff/K03DWrbDoEd744895cuPr/Lr3RNaFJ7LUp3q46vIyGIW/SAr3vPAXrl++jkjUETKwkDHFbeZv2zaxKXQ+E6fPgY3L+79o43JoXMRBF87gyG1dnLy1k2t9rIcHdiaUBILCXyRJ67Yurl++jt6oV7SPOPg/LOUfqx/BcEQf/RUc8luYdLbX44+bdHbft0GphwelHRI8Cn+RBK3burjlyT/3Bf80+zOXhR/mU2FvppkZhFwvPHsrzF/qvWjjci/4k5ZlKFZ71bOXbCj8RWISp3TGQ39O+EXCeAuvWWwLIgPYud170LjIl9AH3WUruVH4SyAEoQfbtLWTyZFNXBp+mDnhVsLm9f7j8/UT74ix4xf40cR+dJet5ELhL74LSg/2E3u3cUn1d6jBW1u/316jFqIjcgDvuhHczel85qCzaSh6C/vTbB7JhcJffOd7D7ZlCWxczsTqvXAWGbjBtIV55ohruGTdZKIOwgaHDNHGYnyS0WweyYXCX3znSw82vpnK++946+/EmIXBRWIPQnDUGTDrK+wXnUDNpvRr8CRKnCY6orqwn2Q0m0eypfAX3xW9B9veDHfN9TZTIWltq0OnwujjAOu3hWIDZNTG5GmiA7ZtFAkIhb8EQmIPtqAlk/Zm+P13IbI7tn1i0gonxy9IO3snk15209ZOogmLJYZCplq8BJLCXwommxAvyOBvezOsvRd2vQ4vPw6RXiDqlXXCI+Ajl8Pf/pSXufrxElZ3b5SQGTeefYx6/RJICn8piGxDPO+Dv+3NsOQsr6cPCUsth7DDT4KTrs3rJukahJVSofCXgsg2xPM++Nu2Mlbbj3EQweimim0Tr2RiHoM/ToOwUgoU/lIQ2YZ43nvO9bMhXAOR3TighzD3R07ioehsTt5Vz8TcfrpIydJOXlIwRb9rNz59s352/1JOezOv/3EJW3fs4t9fn0ZrZALVg5SignC3sUi+aCcvKbqilT/iA7qr74ZoxOvpL1zRdwFojU7gsxs+TXdvlKpwiAtm1DFvWl3a4E83VqGLgpQThb+Utvic/d736ZuzH+n2PgHEwj9x/CESiTJm/w+kDe90YxWt27q48Kd7bvK691ItoialTds4Sk5at3Wx+JkttG7rKs4btjfDypu9r5AwoBsvX5rX86+f3feSQbdhTJLu3GUvdtDdG8UB3b1Rlr3YUZjfT6RI1POXrOU6J3/YZZTEO3PjpZ2+Ad1uCFXB8RfBlIv61fyHGkRObkeqc5NHxkpjpEwkPYW/ZC2XOflZXTjivXwX2VPamX2VdxFINdCbIN34Q7p2JJ87b1odv2pppyfiqA4b86bVZfR7igSVwl+ylsuc/KwuHIm9/MTSztgZWd+olWk7GsbVcu9lJ2jAV8qGwl+ylsuc/EEvHOmmbI6dkVEvfziGcwHTzVtSTgo2z9/MvglcCuyIHfoX59yjseeuBb4IRID/7Zz73VA/T/P8y0/Kmn+qun6OIT/U2IKmcEo582ue//91zv0wqSFHA/OBycChwJNmdqRz8UXUpVL060nHe/tvdwys6yeE/3CDOl1NP/nnKPSl0vhR9jkbuM85txt41cy2ADOA531oiwRBYm8/FPZm7UQZMGUzm0HiVDV9oOjbRurThQRNocP/y2a2AGgBrnLOdQFjgKaEczpix6TS9PX22/f09qNAwwLYb+yAun42g8SpavrF3jYyKHsUiyTKKfzN7EngkBRPXQf8J/BtvCnR3wZuBr4AA7dIJc20aTO7DLgM4LDDDsulqRI0/Xr7VV6PP97bT5qnH5fN7KJ0g9LF3DbS9z2KRVLIKfydc3MyOc/Mfgo8HHvYAYxNeLoOeC3Nz78duB28Ad/sWyqBkba3vxD2qxtyrn42s4uSa/rFXnPflz2KRYZQyNk+o51z22Pf/xPwEefcfDObDNyDV+c/FHgKmDDUgK9m+5SweOB/YBQ8ds2e3j4u5UJs5Ug1f/GLH7N9vm9mU/FKOm3A5QDOufVmdj+wAegFrtRMnzKVvNqmmbdvro
tm3NsvF5pRJEFTsPB3zl08yHM3ATcV6r0lAFKttulCEArRt/jalAvLPvRFgkp3+EphpFpts2oEnPY9eK+zInr7IkGm8JfCyGC1TRHxj8JfclPEdXiGokFVkcwp/MuEL8E31Do8Oay2OVy6kUpkeBT+ZcC34Eu1vv4QYV+oi5RupBIZHoV/GfAt+NKtr59GIS9SupFKZHgU/mWg4MEXn6+P6z9oO8y6fiEvUsW+a1ek1Cn8y0Chgq91Wxevrn6Gc/90ORbtBsC9uJTQ5x/pfwHIsK5f6IuUbqQSyZzCv0zkO/gef2wFO55dwtG0Qagbiy3HF4308Nc1jzMmi4Fc9c5FgkPhLwNsWvUkH3/+89SEevuOxZeA6iXM85GjOS/Ln63euUgwKPxlgK4NT3MkvX29/YiDl9yHWR+t5zf2ca4+/mR/GygiOVP4ywC1R59Cz9b/osZ5PX8XqmHkGd/nrV31XK1yjUhZUPjLABOnz2ET/03Pi/dwyL4jOOjEzzNx7AwmZvnzdOetSPAo/CWlidPnwPSM9uoZlO68FQmmkN8NkPKWbgN1EfGXwl8KKj63P2zozluRAFHZRwpKc/tFgknhLwWnuf0iwaOyTzlqb4aVN3tfRURSUM+/XMQ3VfnAKHjsmvRr7GdBUzVFyo/CvxwkbqpiBtEoEM14jf3BaKqmSHlS2accJG6q4qJELUSUMNFQ9ZBr7A9FUzVFypN6/uUgYVOVaKiaG7o/x75uJ62RyVwdnUBDDj9amx11xVcAAAYnSURBVKSIlCeFf4BkXVtP2FRlWed4lr4wgqiDsJHzhimaqilSnhT+AZFYW68KhzivoY550+oAMgve2KYq47d1UdPalNeeuqZqipQfhX9AJNbWu3uj3PvCX/hVSztT7WWms54fPD2Zqy9ZMGQIq6cuIplQ+AdEvLa+uyeKAxxwTHQzd9X8K9X00sODPLJ6LA3jzh3yZ6mnLiJDUfgHRLzHvuzFDl5ufYrpbGCMvUE1vVRZFFwvJ4Q3AEOHv4jIUBT+AdIQepmG6nuJ1twN0V4IVQFVRKMRQlXVjJn6Sb+bKCJlQuEfFPEbtXrfJ0Rsw1wXgYYFsN9YbzpnjnfqiojEKfyDIn6jVjz4MW/u/pSLFPoikncK/6BIuFGLUBUcf5GCX0QKJqflHczsfDNbb2ZRM2tMeu5aM9tiZpvN7FMJxxvM7KXYcz8yM8ulDWUjfqPWKdfBoofhrFsU/CJSMLn2/NfhTT/5r8SDZnY0MB+YDBwKPGlmRzrnIsB/ApcBTcCjwGnAb3NsR3mI3aglIlJoOfX8nXMbnXObUzx1NnCfc263c+5VYAsww8xGA/s65553zjngF8BncmmDiIgMX6FW9RwDtCc87ogdGxP7Pvl4SmZ2mZm1mFnLjh07CtJQEZFKNGTZx8yeBA5J8dR1zrnl6V6W4pgb5HhKzrnbgdsBGhsb054nIiLDM2T4O+fmZPFzO4CxCY/rgNdix+tSHBcRkSIqVNlnBTDfzEaY2XhgAtDsnNsO7DSzmbFZPguAdJ8eRESkQHKd6nmOmXUAJwCPmNnvAJxz64H7gQ3AY8CVsZk+AFcAd+ANAr+CZvqIiBSdeZNugq+xsdG1tLT43QwRkZJiZq3Oucbk49rDV0SkAin8RUQqkMJfRKQCKfxFRCqQwl9EpAIp/EVEKpDCX0SkAin8RUQqkMJfRKQCKfxFRCqQwl9EpAKVf/i3N8PKm72vIiIC5L6Hb7C1N8NdcyHSDeEab4N07ZErIlLmPf+2lV7wu4j3tW2l3y0SEQmE8u75188mGqqGCBCqJlQ/2+8WiYgEQlmHf2t0Aj/o/hca3HpaI5O5OjqBBr8bJSISAGUd/k1bO2nuPYImdwRh8x43jKv1u1kiIr4r65r/zMNHUVMVImxQXRVi5uGj/G6SiEgglHXPv2FcLUsvmUnT1k5mHj5KvX4RkZiyDn/wLgAKfRGR/sq67CMiIqkp/EVEKpDCX0SkAin8RUQqkMJfRKQCKfxFRCqQOef8bkNGzGwHsM3vdhTYgcAbfjciQPT36E9/j/709+gv3d9jnHPuoOSDJRP+lcDMWpxzjX63Iyj09+hPf4/+9Pfob7h/D5V9REQqkMJfRKQCKfyD5Xa/GxAw+nv0p79Hf/p79Desv4dq/iIiFUg9fxGRCqTwFxGpQAr/gDGzH5jZJjP7k5k9aGb7+90mP5nZ+Wa23syiZlax0/rM7DQz22xmW8zsGr/b4ycz+5mZvW5m6/xuSxCY2Vgze8bMNsb+W/lKJq9T+AfPE8AxzrnjgD8D1/rcHr+tA84F/uB3Q/xiZmFgMXA6cDRwoZkd7W+rfLUEOM3vRgRIL3CVc24SMBO4MpP/fyj8A8Y597hzrjf2sAmo87M9fnPObXTObfa7HT6bAWxxzm11znUD9wFn+9wm3zjn/gC86Xc7gsI5t90592Ls+53ARmDMUK9T+AfbF4Df+t0I8d0YoD3hcQcZ/MctlcfM6oHjgReGOrfst3EMIjN7EjgkxVPXOeeWx865Du/j3NJits0Pmfw9KpylOKY52tKPme0N/Br4qnPunaHOV/j7wDk3Z7DnzWwhcBbwCVcBN2IM9fcQOoCxCY/rgNd8aosEkJlV4wX/Uufcskxeo7JPwJjZacA/A3Odc+/63R4JhFXABDMbb2Y1wHxghc9tkoAwMwPuBDY65/4909cp/IPnx8A+wBNmtsbMbvO7QX4ys3PMrAM4AXjEzH7nd5uKLTYB4MvA7/AG8+53zq33t1X+MbN7geeBo8ysw8y+6HebfDYLuBg4JZYZa8zsjKFepOUdREQqkHr+IiIVSOEvIlKBFP4iIhVI4S8iUoEU/iIiFUjhLyJSgRT+IiIV6H8Ak8En/idUJxAAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Vis\n", + "fig, ax = plt.subplots()\n", + "ax.plot(X_pt.cpu().numpy(), y_pt.cpu().numpy(), \".\", label=\"data\")\n", + "ax.plot(X_pt.cpu().numpy(), y_hat.cpu().numpy(), \".\", label=\"pred\")\n", + "ax.set_title(f\"MSE: {loss_history[-1]:0.1f}\")\n", + "ax.legend();" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# The Solution" + "# PyTorch Solution" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 282, "metadata": {}, "outputs": [], "source": [ @@ -93,7 +395,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 290, "metadata": {}, "outputs": [], "source": [ @@ -108,45 +410,112 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 284, "metadata": {}, "outputs": [], "source": [ + "# define model, loss function and optmizer\n", "model = LinReg(n_features).to(DEVICE) # <-- here\n", "loss_fn = nn.MSELoss()\n", "optimizer = optim.SGD(model.parameters(), lr=0.1)\n", "\n", - "\n", + "# move to CUDA if available\n", "X, y = X.to(DEVICE), y.to(DEVICE) # <-- here" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 285, "metadata": {}, "outputs": [], "source": [ - "# Train step\n", - "for _ in range(50):\n", + "def train(model, X, y, epochs=50):\n", " model.train() # <-- here\n", - " optimizer.zero_grad()\n", - "\n", - " y_ = model(X)\n", - " loss = loss_fn(y_, y)\n", + " loss_history = []\n", + " for _ in range(epochs):\n", + " optimizer.zero_grad()\n", "\n", - " loss.backward()\n", - " optimizer.step()\n", + " y_ = model(X)\n", + " loss = loss_fn(y_, y)\n", + " \n", + " loss_history.append(loss.item())\n", "\n", - "# Eval\n", - "model.eval() # <-- here\n", - "with torch.no_grad():\n", - " y_ = model(X) \n", + " loss.backward()\n", + " optimizer.step()\n", "\n", + " return loss_history" + ] + }, + { + "cell_type": "code", + "execution_count": 286, + "metadata": {}, + "outputs": [], + "source": [ + "def evaluate(model, X):\n", + " model.eval() # <-- here\n", + " with torch.no_grad():\n", + " y_ = model(X) \n", + " return y_" + ] + }, + { + "cell_type": "code", + "execution_count": 287, + "metadata": {}, + "outputs": [], + "source": [ + "loss_history = train(model, X, y, epochs=50)\n", + "y_hat = evaluate(model, X)" + ] + }, + { + "cell_type": "code", + "execution_count": 288, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5xV5X3v8c93LsxwmwFhGGAAuQQvgIoRjT1eYjRGYtNg2pME28RLzCFa2yZNetGkl7Sntj1trK1tY2qiVV/xElu12h7NkUMuxhOjDoqioAKCMIDMIMJwHZiZ3/ljrYEt7mGGubBn9vq+X9mvvfaz1trrWWC+e/GsZz2PIgIzM8uGkkJXwMzMjh2HvplZhjj0zcwyxKFvZpYhDn0zswxx6JuZZYhD36wISfqJpC8Wuh428Dj0bcCQtE7SRwtdD7Ni5tA360OSygpdB7MjcejboCDpf0haLWmbpMckTUzLJekWSY2Sdkh6WdKcdN2lklZI2ilpo6Tf6+S7r5L0/yT9Y/odr0m6KGd9taQ7JG1Ov+cvJJUetu8tkrYB38zz/SWSbpC0RtI7kh6UdFy6bqqkkLRI0qb0GF/L2bdC0t+n6zalyxU56xdIWiapOf3++TmHPj6t205JT0oa26u/BCsKDn0b8CRdCPwV8BlgAvAW8EC6+mPA+cAJwCjgs8A76bo7gC9FxEhgDvCjIxzmQ8CbwFjgT4GHO4IZuBtoBT4AnJ4e84t59h0H3JTnu38HuAz4MDAReBf458O2+QgwM/3uG3Kaub4BnA3MBU4DzgL+KP1zOQu4B/j99NzPB9blfOevA1en9RoC5P3Rs4yJCL/8GhAvksD6aJ7yO4C/yfk8AjgATAUuBN4gCcaSw/ZbD3wJqOriuFcBmwDllD0HfB6oBVqAoTnrLgd+nLPv+i6+fyVwUc7nCWn9y9JzCOCknPV/A9yRLq8BLs1ZdwmwLl3+F+CWTo75E+CPcj7/JvDDQv8d+1X4l6/0bTCYSHJ1D0BE7CK5mq+LiB8B/0Ry5bxF0u2SqtJNfw24FHhL0k8l/dIRjrExInJHH3wrPe7xQDmwWdJ2SdtJwnZczrYbuqj/8cAjOfuvBNpIflDyfUfHsd937oetm0zyo9CZt3OW95D8WFrGOfRtMNhEEpwASBoOjAE2AkTErRFxBjCbpJnn99Py5yNiAUlA/wfw4BGOUSdJOZ+npMfdQHKlPzYiRqWvqoiYnbNtV0PVbgA+nrP/qIiojIiNOdtMznPs9537Yes2ADO6OLbZezj0baApl1SZ8yoD7gOuljQ3vYn5l8CzEbFO0pmSPiSpHNgN7APaJA2R9BuSqiPiANBMcnXdmXHA70gql/Rp4GTg8YjYDDwJ3CypKr0pO0PSh4/inL4D3CTpeABJNZIWHLbNH0saJmk2STv8D9Ly+4E/SvcZC/wJ8P103R3pn8tFab3qJJ10FPWyDHLo20DzOLA35/XNiFgC/DHwELCZ5Op2Ybp9FfBdkpujb5E0+3wrXfd5YJ2kZuBa4HNHOO6zJDdSt5LcjP3vEdFxQ/gKkhuhK9Lj/DtJu3x3/QPwGPCkpJ3AL0hu/ub6KbAaWAJ8KyKeTMv/AqgHXgaWAy+kZUTEcyQ/ELcAO9LvOB6zI9B7mzHNskfSVcAXI+LcAhx7KrAWKI+I1mN9fMseX+mbmWWIQ9/MLEPcvGNmliG+0jczy5ABPzjU2LFjY+rUqYWuhpnZoLJ06dKtEVFzeHmXoS9pMsn4HuOBduD2iPgHSX8L/Aqwn+SpwKsjYnvaG2El8Hr6Fb+IiGvT7zoDuAsYStI178vRRfvS1KlTqa+v7845mplZStJb+cq707zTCnwtIk4mGd/kekmzgMXAnIg4lWTskxtz9lkTEXPT17U55bcBi0j6Q88EckcENDOzftZl6EfE5oh4IV3eSXIVXxcRT+b0K/4FMOlI3yNpAsnAV8+kV/f3kIw8aGZmx8hR3chNm25OJ3l6MdcXgCdyPk+T9GI6yNV5aVkd0JCzTUNalu84iyTVS6pvamo6miqamdkRdDv0JY0geQz+KxHRnFP+DZImoHvTos3AlIg4HfgqcF866qF4v7zt+RFxe0TMi4h5NTXvuw9hZmY91K3eO+lgVg8B90bEwznlVwKfIBkrPAAiooVkVEIiYqmkNSQjHzbw3iagSRwaLdDMzI6BLq/00+Fm7wBWRsTf5ZTPB/4Q+GRE7Mkpr8mZSm46yQ3bN9PRCndKOjv9ziuAR/v0bMzM7Ii6c6V/DslohcslLUvLvg7cClQAi9NhyDu6Zp4P/LmkVpKhbK+NiG3pftdxqMvmE7z3PoCZmfWzLkM/Ip4mf3v8451s/xBJU1C+dfUkc5X2u7t/vo7jhg/hV06b2PXGZmYZUbTDMDzw/AYeXbax6w3NzDKkaEO/tqqCt5v3FboaZmYDStGG/viqSt7e0VLoapiZDShFG/q1VZW8s7uFA23tha6KmdmAUdShHwFNO321b2bWoWhDf3x1BYDb9c3MchRt6I8bWQlAo0PfzOygog398dVJ6L+9w6FvZtahaEP/uGFDKC8VW9ymb2Z2UNGGfkmJGDeyki2+0jczO6hoQx/8gJaZ2eGKOvTHV1eyxaFvZnZQUYf+uJGVbGl2m76ZWYeiDv3x1ZXsamllV0tr1xubmWVAUYd+bVXygJabeMzMEkUe+klffffgMTNLFHXoj+8I/Z0OfTMz6N4cuZMl/VjSSkmvSvpyWn6cpMWSVqXvo3P2uVHSakmvS7okp/wMScvTdbemc+X2m44rfQ+xbGaW6M6VfivwtYg4GTgbuF7SLOAGYElEzASWpJ9J1y0EZgPzgW93TJQO3AYsIpksfWa6vt8MryhjZEWZ2/TNzFJdhn5EbI6IF9LlncBKoA5YANydbnY3cFm6vAB4ICJaImItsBo4S9IEoCoinomIAO7J2affjKuqcOibmaWOqk1f0lTgdOBZoDYiNkPywwCMSzerAzbk7NaQltWly4eX96vx1ZV+KtfMLNXt0Jc0AngI+EpENB9p0zxlcYTyfMdaJKleUn1TU1N3q5hXbVUljX5Ay8wM6GboSyonCfx7I+LhtHhL2mRD+t6YljcAk3N2nwRsSssn5Sl/n4i4PSLmRcS8mpqa7p5LXrVVyVAM7e15f1/MzDKlO713BNwBrIyIv8tZ9RhwZbp8JfBoTvlCSRWSppHcsH0ubQLaKens9DuvyNmn34yvqqS1Pdi2Z39/H8rMbMAr68Y25wCfB5ZLWpaWfR34a+BBSdcA64FPA0TEq5IeBFaQ9Py5PiLa0v2uA+4ChgJPpK9+1fFU7ts79jF2REV/H87MbEDrMvQj4mnyt8cDXNTJPjcBN+UprwfmHE0Fe+vgU7nN+5hTV30sD21mNuAU9RO5cGjaRI+2aWaWgdAfO6ICCXfbNDMjA6FfXlrC2BEVHnTNzIwMhD4kN3M96JqZWUZCf3xVJW/7St/MLBuhX1tVSeNO38g1M8tM6G/bvZ+W1rauNzYzK2KZCP2OyVQ8Bo+ZZV0mQr+2+t
ADWmZmWZaN0O8YisGhb2YZl4nQPzhXrpt3zCzjMhH61UPLGVJW4uYdM8u8TIS+JPfVNzMjI6EPSROPr/TNLOsyE/qeIN3MLEOhn1zptxDhaRPNLLsyE/q1VZXsPdBG877WQlfFzKxgshP6fkDLzKxbE6PfKalR0is5ZT+QtCx9reuYO1fSVEl7c9Z9J2efMyQtl7Ra0q3p5OjHzPgqh76ZWXcmRr8L+Cfgno6CiPhsx7Kkm4EdOduviYi5eb7nNmAR8AvgcWA+x2Bi9A65E6SbmWVVl1f6EfEUsC3fuvRq/TPA/Uf6DkkTgKqIeCaSO6n3AJcdfXV7rmOCdA+xbGZZ1ts2/fOALRGxKqdsmqQXJf1U0nlpWR3QkLNNQ1qWl6RFkuol1Tc1NfWyionK8lKqh5b7St/MMq23oX85773K3wxMiYjTga8C90mqAvK133fadzIibo+IeRExr6amppdVPGR8VaUHXTOzTOtOm35eksqAXwXO6CiLiBagJV1eKmkNcALJlf2knN0nAZt6euyeqq2upNGhb2YZ1psr/Y8Cr0XEwWYbSTWSStPl6cBM4M2I2AzslHR2eh/gCuDRXhy7R2pHVvhK38wyrTtdNu8HngFOlNQg6Zp01ULefwP3fOBlSS8B/w5cGxEdN4GvA74HrAbWcAx77nQYX11J084WWtvaj/WhzcwGhC6bdyLi8k7Kr8pT9hDwUCfb1wNzjrJ+faq2qpL2gHd27z/Ym8fMLEsy80QuHOq26R48ZpZVmQp9P5VrZlmXqdDveCrXoW9mWZWp0B8zooLSErkHj5llVqZCv7REjBtZ4QnSzSyzMhX6AOM8baKZZVjmQn98VYV775hZZmUu9OtGDaPh3b20t3vaRDPLnsyF/rSa4ew90MaWnb7aN7PsyVzozxg7HIC1TbsLXBMzs2Mvc6E/rSYJ/Te3OvTNLHsyF/q1IysZWl7Km77SN7MMylzol5SIaWOHs3brrkJXxczsmMtc6EPSxLPWzTtmlkGZDP3pY4ez4d297G/1uPpmli3ZDP2a4bS1B+u37Sl0VczMjqlMhv60sSMA3MRjZpnTnekS75TUKOmVnLJvStooaVn6ujRn3Y2SVkt6XdIlOeVnSFqerrs1nSu3IKaNSbttNvlmrpllS3eu9O8C5ucpvyUi5qavxwEkzSKZO3d2us+3OyZKB24DFpFMlj6zk+88JqqHlTNm+BBf6ZtZ5nQZ+hHxFLCtq+1SC4AHIqIlItaSTIJ+lqQJQFVEPBMRAdwDXNbTSveF6TXD/YCWmWVOb9r0f0vSy2nzz+i0rA7YkLNNQ1pWly4fXp6XpEWS6iXVNzU19aKKnZs2drgf0DKzzOlp6N8GzADmApuBm9PyfO30cYTyvCLi9oiYFxHzampqeljFI5s2dgRbd7XQvO9Av3y/mdlA1KPQj4gtEdEWEe3Ad4Gz0lUNwOScTScBm9LySXnKC2Z6OgbPOjfxmFmG9Cj00zb6Dp8COnr2PAYslFQhaRrJDdvnImIzsFPS2WmvnSuAR3tR716bPrajB49D38yyo6yrDSTdD1wAjJXUAPwpcIGkuSRNNOuALwFExKuSHgRWAK3A9RHRln7VdSQ9gYYCT6SvgpkyZhgl8mibZpYtXYZ+RFyep/iOI2x/E3BTnvJ6YM5R1a4fVZSVMmn0MHfbNLNMyeQTuR2SHjx+QMvMsiPzob92626SRwfMzIpfpkN/Rs1w9uxvo3FnS6GrYmZ2TGQ69DsGXlvjJh4zy4hsh37aV983c80sKzId+hOqKqksL2Gt++qbWUZkOvRLSsTUMZ460cyyI9OhDx5t08yyxaE/dgTrt+3hQJvnyzWz4pf50J82Npkvd4PnyzWzDHDo13jgNTPLjsyHfsdom76Za2ZZkPnQHzVsCMcNH+KbuWaWCZkPffDAa2aWHQ59Dg28ZmZW7Bz6JH31G3e2sKultdBVMTPrVw59cm7mugePmRW5LkNf0p2SGiW9klP2t5Jek/SypEckjUrLp0raK2lZ+vpOzj5nSFouabWkW9O5cgeEjtE239zqdn0zK27dudK/C5h/WNliYE5EnAq8AdyYs25NRMxNX9fmlN8GLCKZLH1mnu8smOPHDENyt00zK35dhn5EPAVsO6zsyYjoaAD/BTDpSN8haQJQFRHPRDJN1T3AZT2rct+rLC+lbtRQP6BlZkWvL9r0vwA8kfN5mqQXJf1U0nlpWR3QkLNNQ1qWl6RFkuol1Tc1NfVBFbvmHjxmlgW9Cn1J3wBagXvTos3AlIg4HfgqcJ+kKiBf+32nE9NGxO0RMS8i5tXU1PSmit02o2aE58s1s6LX49CXdCXwCeA30iYbIqIlIt5Jl5cCa4ATSK7sc5uAJgGbenrs/jC9Zji7Wlp5u3lfoatiZtZvehT6kuYDfwh8MiL25JTXSCpNl6eT3LB9MyI2AzslnZ322rkCeLTXte9Dc+qqAXhpw/YC18TMrP90p8vm/cAzwImSGiRdA/wTMBJYfFjXzPOBlyW9BPw7cG1EdNwEvg74HrCa5F8AufcBCm72xCqGlJbwokPfzIpYWVcbRMTleYrv6GTbh4CHOllXD8w5qtodQxVlpZw8sYpl6x36Zla8/ERujtMnj2L5xh20ehYtMytSDv0ccyePYs/+Nt7Y4idzzaw4OfRzzJ08CoBlbtc3syLl0M9x/JhhjB5WzrIN7xa6KmZm/cKhn0MSp00e5St9MytaDv3DzJ08ilWNu9i570Chq2Jm1ucc+oc5fcpoImB5w45CV8XMrM859A8zd1JyM9cPaZlZMXLoH6Z6WDnTxw53u76ZFSWHfh5z05u5HnHTzIqNQz+PuVNG0bSzhU07POKmmRUXh34eBx/S8jg8ZlZkHPp5nDS+iiFlJby43g9pmVlxcejnMaSshDkTq3wz18yKjkO/E3Mnj2b5xh0c8IibZlZEHPqdOH3KKFpa23n97Z2FroqZWZ9x6Hei42auH9Iys2LSnekS75TUKOmVnLLjJC2WtCp9H52z7kZJqyW9LumSnPIzJC1P192azpU7YE0aPZSxI4a4B4+ZFZXuXOnfBcw/rOwGYElEzASWpJ+RNAtYCMxO9/l2x0TpwG3AIpLJ0mfm+c4BRVL6kJZ78JhZ8egy9CPiKWDbYcULgLvT5buBy3LKH4iIlohYSzIJ+lmSJgBVEfFMJI+53pOzz4A1d/Io1jTtZsdej7hpZsWhp236tRGxGSB9H5eW1wEbcrZrSMvq0uXDywe0uZOTVquXG9zEY2bFoa9v5OZrp48jlOf/EmmRpHpJ9U1NTX1WuaN16uRqJD+Za2bFo6ehvyVtsiF9b0zLG4DJOdtNAjal5ZPylOcVEbdHxLyImFdTU9PDKvZeVWU5M2pGuAePmRWNnob+Y8CV6fKVwKM55QslVUiaRnLD9rm0CWinpLPTXjtX5OwzoJ3uETfNrIh0p8vm/cAzwImSGiRdA/w1cLGkVcDF6Wci4lXgQWAF8EPg+ohoS7/qOuB7JDd31wBP9PG59Iu5U0axbfd+NmzbW+iqmJn1WllXG0TE5Z2suqiT7
W8CbspTXg/MOaraDQAfnJLczP3F2neYMmZYgWtjZtY7fiK3CyeNH8nE6koWr9hS6KqYmfWaQ78LkvjorFp+tqqJvfvbut7BzGwAc+h3w8Wzatl3oJ2nV28tdFXMzHrFod8NH5o2hpEVZSxe8Xahq2Jm1isO/W4YUlbCBSeNY8nKRtra3XXTzAYvh343XTyrlnd27/cUimY2qDn0u+mCE2soL5V78ZjZoObQ76aqynLOnj7GoW9mg5pD/yhcPKuWN7fuZnXjrkJXxcysRxz6R+GjJ9cC+GrfzAYth/5RmDhqKHPqqtx108wGLYf+Ubr45PG8uGE7TTtbCl0VM7Oj5tA/ShfPqiUClqx0E4+ZDT4O/aN08oSR1I0a6nZ9MxuUHPpHSRIXz6rl6dVb2bO/tdDVMTM7Kg79HvjYrFpaWtt56g0PwGZmg4tDvwfOnHYcVZVlbuIxs0HHod8D5aUlXHjSOH702hZa29oLXR0zs27rcehLOlHSspxXs6SvSPqmpI055Zfm7HOjpNWSXpd0Sd+cQmFcPGs87+45wNK3PACbmQ0ePQ79iHg9IuZGxFzgDGAP8Ei6+paOdRHxOICkWcBCYDYwH/i2pNLeVb9wPnxiDUPKSnjspU2FroqZWbf1VfPORcCaiHjrCNssAB6IiJaIWAusBs7qo+MfcyMqylhw2kQefmEj2/fsL3R1zMy6pa9CfyFwf87n35L0sqQ7JY1Oy+qADTnbNKRl7yNpkaR6SfVNTU19VMW+d81509h7oI37nltf6KqYmXVLr0Nf0hDgk8C/pUW3ATOAucBm4OaOTfPsnncaqoi4PSLmRcS8mpqa3lax35w0vopzPjCGe37+Fgd8Q9fMBoG+uNL/OPBCRGwBiIgtEdEWEe3AdznUhNMATM7ZbxIw6BvErzl3Gm837+Px5ZsLXRUzsy71RehfTk7TjqQJOes+BbySLj8GLJRUIWkaMBN4rg+OX1AXnDCO6TXDufPptUR4/lwzG9h6FfqShgEXAw/nFP+NpOWSXgY+AvwuQES8CjwIrAB+CFwfEW29Of5AUFIirj5nGi817HD3TTMb8HoV+hGxJyLGRMSOnLLPR8QpEXFqRHwyIjbnrLspImZExIkR8URvjj2Q/NoH66geWs4dT68tdFXMzI7IT+T2gWFDyvj1D03h/7z6Nhu27Sl0dczMOuXQ7yNX/NLxlEjc9fN1ha6KmVmnHPp9ZEL1UC49ZQI/eH4DO/cdKHR1zMzycuj3oWvOncaullYerG8odFXMzPJy6Peh0yaPYt7xo7nr52tpa3f3TTMbeBz6feyac6exYdteFq94u9BVMTN7H4d+H/vY7PFMPm4oty5Z7bH2zWzAcej3sdISccP8k1mxudk9ecxswHHo94NLTxnPhSeN4+Yn36DhXffbN7OBw6HfDyTx5wtmA/Anj77qMXnMbMBw6PeTSaOH8bWPncCPXmvkf3sETjMbIBz6/eiq/zaVOXVV/Nl/rmDHXj+wZWaF59DvR2WlJfz1r57KO7ta+F8/fK3Q1TEzc+j3tzl11Vx9zjTue3Y9z6/bVujqmFnGOfSPga9efAJ1o4by9YeXs7/VfffNrHAc+sfA8Ioy/udls1nVuIt/+emaQlfHzDLMoX+MXHhSLb986gT+YckqnnqjqdDVMbOM6u10ievSqRGXSapPy46TtFjSqvR9dM72N0paLel1SZf0tvKDzV/96inMrB3Jtd9fyssN2wtdHTPLoL640v9IRMyNiHnp5xuAJRExE1iSfkbSLGAhMBuYD3xbUmkfHH/QqKos5+6rz2T0sCF84a7neeud3YWukpllTH807ywA7k6X7wYuyyl/ICJaImItsBo4qx+OP6CNq6rknmvOoq09uOLO59i6q6XQVTKzDOlt6AfwpKSlkhalZbUdk6Gn7+PS8jpgQ86+DWnZ+0haJKleUn1TU/G1f8+oGcEdV53JluZ9XP2vz7O7pbXQVTKzjOht6J8TER8EPg5cL+n8I2yrPGV5B6WJiNsjYl5EzKupqellFQemD04ZzT//+gdZsbmZa7+/1F05zeyY6FXoR8Sm9L0ReISkuWaLpAkA6XtjunkDMDln90nApt4cf7C76ORa/vJTc/jZqq384UMve7YtM+t3PQ59ScMljexYBj4GvAI8BlyZbnYl8Gi6/BiwUFKFpGnATOC5nh6/WHz2zCl87eITeOTFjVz1r8/x7u79ha6SmRWxsl7sWws8Iqnje+6LiB9Keh54UNI1wHrg0wAR8aqkB4EVQCtwfUS09ar2ReK3L5pJzcgK/uTRV/nEPz7Ndz53BqdMqi50tcysCGmgj/U+b968qK+vL3Q1jomXNmznuu8vZevu/fzFgjl85szJXe9kZpaHpKU5XekP8hO5A8hpk0fxn799LmdOHc0fPPQyNz68nJZW/2PIzPqOQ3+AGTOignu+8CF+84IZ3P/cej7znWdY3bir0NUysyLh0B+ASkvEH8w/ie987gze3LqbS/7+Kb7xyHKadvpBLjPrnd7cyLV+Nn/OeM6cOppbl6zi3mfX8x8vbuTaD8/gi+dNZ+iQTI1gYWZ9xFf6A9yYERX82YI5PPm753PezBpuXvwGF3zrxzz4/Ab36zezo+beO4NM/bpt3PT4Sl5cv52J1ZV8et5kPj1vEpNGDyt01cxsAOms945DfxCKCJ5csYV7n13Pz1YlYxOd+4GxfPbMyVw8q5aKMjf9mGWdQ79INby7h3+rb+Df6jewacc+Rg8r59JTJvDhE2r4pRljGFlZXugqmlkBOPSLXFt78PTqrfzg+fX85PUm9uxvo6xEfHDKaM6bOZbzTqjhlLpqSkvyjXtnZsXGoZ8hLa1tvPDWdn62qomnVjXxysZmAEZWlDG7rorZE6uZU1fFnInVTK8Z4R8CsyLk0M+wd3a18PTqrTy7dhuvbmrmtc3NtKRDOQ8tL+XE8SOZNnY4x48ZxvFjhjHluGR5zPAhpGMrmdkg49C3g1rb2lnTtJtXNu7g1U3NrNzczFvv7GZz8z5y/3MYPqSU8dWVjBtZSc3ICsaNrGBcVQXjRlZy3PAhjBpWTvXQckYNHcLIyjJK/C8GswGjs9D3w1kZVFZawonjR3Li+JH82hmHyvcdaKPh3b289c5u3npnD+u37WFL8z4ad7bw4oZ3aWxuOfgvhMNJyRzAVUPLGD6kjOEV6WtIKcMryhg2pJTK8lIqy0qoKE+Xy0uoLCulvKyEIaUlDCkT5aXJcnlZCeUlJZSWiPJSpe/J57ISUVIiSiVKS9P3ElEiUaLkiWb/C8UsP4e+HVRZXsoHxo3gA+NG5F0fEexsaaWxuYV39+xnx54DbN97gO179tO8N1lu3nuA3fvb2N3Syo69B9i0fS97WlrZvb+NfQfaOv3R6A8lIvkhKBEiWZYOvQvQYcvJP1aSsmSJdP2h7Ui35eDye9/Tb3jf+o7vO1Se/4fpiD9Xnaw82p+4wfajOLhq23f+63fO7fMu2A596zZJydV8L7qBRgQtre20HGhnX2vyQ3CgrZ39rZG8t7VzoLWdlrZ22tqC1vZ2WtuD
1rZI35PP7RG0tee8ImhvD9oD2iN5jzi0juR/tLdH8h5BpNsEJMskZYcedO7Y5tC6pLTjXJLy9xS+d5Hc5tP3lnfy59PFn93R7tM3OxRWDLYK9yH1w8+dQ9+OKUlp004p1fgZArNjzWPvmJllSG/myJ0s6ceSVkp6VdKX0/JvStooaVn6ujRnnxslrZb0uqRL+uIEzMys+3rTvNMKfC0iXkgnSF8qaXG67paI+FbuxpJmAQuB2cBE4P9KOsHz5JqZHTs9vtKPiM0R8UK6vBNYCdQdYZcFwAMR0RIRa4HVwFk9Pb6ZmR29PmnTlzQVOB14Ni36LUkvS7pT0ui0rA7YkLNbA538SEhaJKleUn1TU1NfVNHMzOiD0Jc0AngI+EpENAO3ATOAucBm4OaOTfPsnrcvVkTcHhHzImJeTU1Nb6toZmapXoW+pHKSwL83Ih4GiIgtEeNnfSgAAAN0SURBVNEWEe3AdznUhNMATM7ZfRKwqTfHNzOzo9Ob3jsC7gBWRsTf5ZRPyNnsU8Ar6fJjwEJJFZKmATOB53p6fDMzO3o9HnBN0rnAz4DlQMez9V8HLidp2glgHfCliNic7vMN4AskPX++EhFPdOM4TcBbPaokjAW29nDfwcznnS0+72zp7nkfHxHvax8f8KNs9oak+nyjzBU7n3e2+Lyzpbfn7SdyzcwyxKFvZpYhxR76txe6AgXi884Wn3e29Oq8i7pN38zM3qvYr/TNzCyHQ9/MLEOKMvQlzU+Hb14t6YZC16c/peMbNUp6JafsOEmLJa1K30cf6TsGoyMM7V3U5y6pUtJzkl5Kz/vP0vKiPm8ASaWSXpT0X+nnoj9nAEnrJC1Ph6qvT8t6fO5FF/qSSoF/Bj4OzAIuT4d1LlZ3AfMPK7sBWBIRM4El6edi0zG098nA2cD16d9zsZ97C3BhRJxG8hDkfElnU/znDfBlktF8O2ThnDt8JCLm5vTP7/G5F13ok4z1szoi3oyI/cADJMM6F6WIeArYdljxAuDudPlu4LJjWqlj4AhDexf1uUdiV/qxPH0FRX7ekiYBvwx8L6e4qM+5Cz0+92IM/W4P4VzEajuGvkjfxxW4Pv3qsKG9i/7c02aOZUAjsDgisnDefw/8AYeGfIHiP+cOATwpaamkRWlZj8+9GCdG7/YQzjb4HT60dzIOYHFLZ5ubK2kU8IikOYWuU3+S9AmgMSKWSrqg0PUpgHMiYpOkccBiSa/15suK8UrfQzjDlo7RTtP3xgLXp1/kG9qbjJw7QERsB35Cck+nmM/7HOCTktaRNNdeKOn7FPc5HxQRm9L3RuARkibsHp97MYb+88BMSdMkDSGZl/exAtfpWHsMuDJdvhJ4tIB16RedDe1NkZ+7pJr0Ch9JQ4GPAq9RxOcdETdGxKSImEry/+cfRcTnKOJz7iBpeDoHOZKGAx8jGa6+x+delE/kSrqUpA2wFLgzIm4qcJX6jaT7gQtIhlvdAvwp8B/Ag8AUYD3w6Yg4/GbvoHaEob2fpYjPXdKpJDfuSkku2h6MiD+XNIYiPu8OafPO70XEJ7JwzpKmk1zdQ9Icf19E3NSbcy/K0Dczs/yKsXnHzMw64dA3M8sQh76ZWYY49M3MMsShb2aWIQ59M7MMceibmWXI/wfjwkNpQlIdhQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(loss_history)\n", + "plt.title('Loss per epoch');" + ] + }, + { + "cell_type": "code", + "execution_count": 289, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3df3hcZZ338fd3Jg0FKVIKCktKU6TQUqDQpKVY6iW/tApP+SULFKQsKi6y1+LKw/PgosLqshe7wrq61uVB6hbXFhZcoBUUUHC1AiFNKBVKWyklNRHUEkptt9AkM9/njzOTTqYzyUzmx5kfn9d19UrnzJmZu1E+557vfZ/7NndHRETqSyTsBoiISPkp/EVE6pDCX0SkDin8RUTqkMJfRKQOKfxFROqQwl9EpA4p/KWqmVmXmfWZ2cFpx18wMzez5sTjJjP7LzN708y2m9mLZnZl4rnmxLk70/5cnGMb/snMus3sT2a2xcxuSnv+LjPbaGbx5GemPLfIzDoTr+1JvFdDls852sxWmNlWM3vLzB43s2Ny/V2JpFL4Sy14Dbg0+cDMjgf2TTvnP4BuYBIwAbgC+EPaOQe6+/4pf/4zx89fAkx19wOADwILzeyClOfXAp8Dns/w2v2AzwMHAycDZwD/O8vnHAisBI4B3g+0AytybKPIEAp/qQX/QRDmSYuA76edMwtY6u7/4+4D7r7G3X9SjA93943u/j8ph+LAUSnPL3b3J4F3M7z239x9lbv3ufvvgGXA3Cyf0+7uS9z9LXfvB74BHGNmE4rx75D6ovCXWtAGHGBm08wsClwM/CDDOYvN7BIzOyKfNzezhWb26xHOudHMdgI9wHuA5fl8RooPAevyOPf37t47ys+SOqbwl1qR7P2fBWwAfpf2/EXAKuDLwGuJMYFZaee8aWZvp/yZBuDuy939hOE+3N1vA8YBMxNt2Z7vP8DM/gJoBW7P4dwmYDHwhXw/RwQU/lI7/gNYCFzJ3iUf3H2bu9/o7tMJ6uUvAA+bmaWcdrC7H5jyZ30+DfDAGuAd4O/yea2ZnQfcBnzM3d8c4dxDgCeA77j7vfl8jkiSwl9qgrtvIRj4/Tjw4AjnvknQu/4z4KASNKcB+ECuJ5vZfOC7wP9y9xdHOHc8QfCvdPdbC2ql1DWFv9SSTwGnpw2+AmBm/2hmx5lZg5mNA64BNhVaLzeziJl91szGW2A2cC3wZMo5jWY2FjBgjJmNNbNI4rnTCQZ5L3T39hE+6wDgceBpd7+xkHaLKPylZrj7q+7ekeXp/YCHgLeBzQRTPheknfN22jz/LwCY2WVmNtwg7PnAq8AOgoHmf038SXqCoBT0QeCuxN8/lHjuy8B7gR+nfO7gLCQz+4mZ/W3K58wC/iKtnXkNYIsAmDZzERGpP+r5i4jUIYW/iEgdUviLiNQhhb+ISB3KuHpgJTr44IO9ubk57GaIiFSVzs7ON939kPTjVRP+zc3NdHRkm8UnIiKZmNmWTMdV9hERqUMKfxGROqTwFxGpQ1VT88+kv7+fnp4e3n13rz0yasrYsWNpampizJgxYTdFRGpEVYd/T08P48aNo7m5maEr89YOd6e3t5eenh4mT54cdnNEpEZUddnn3XffZcKECTUb/ABmxoQJE2r+242IlFdVhz9Q08GfVA//RpFq07llG4t/vonOLdvCbsqoVHXZR0QkDJ1btnHZ3W30DcRpbIiw7NNzaJk0Puxm5aXqe/6V5JZbbuH227Nvv/rwww/z8ssvl7FFIlIKbZt76RuIE3foH4jTtrmgPYFCofAvI4W/SG2Yc+QEGhsiRA3GNESYc+SEsJuUt7oL/2LX6W699VaOOeYYzjzzTDZu3AjAd7/7XWbNmsWMGTO48MIL2bVrF8888wwrV67khhtu4MQTT+TVV1/NeJ6IVL6WSeNZ9uk5fOEjx1RlyQfqLPyTdbo7ntjIZXe3FXwB6Ozs5L777mPNmjU8+OCDrF69GoALLriA1atXs3btWqZNm8aSJUv44Ac/yIIFC/j617/OCy+8wAc+8IGM54lIdWiZNJ5rTzuqKoMf6mzAN1OdrpD/4VatWsX555/PfvvtB8CCBcGWsC+99BJf+tKXePvtt9m5cycf/ehHM74+1/NERIqtrnr+pajTZZqGeeWVV/Ltb3+bF198kZtvvjnrHP1czxMRKba6Cv9i1+k+9KEP8dBDD/HOO++wY8cOfvSjHwGwY8cODjvsMPr7+1m2bNng+ePGjWPHjh2Dj7OdJyJSanVV9oHgAlCsGt3MmTO5+OKLOfHEE5k0aRLz5s0D4Gtf+xonn3wykyZN4vjjjx8M/EsuuYTPfOYzfOtb3+KHP/xh1vNERErN3D3sNuSktbXV0zdzWb9+PdOmTQupReVVT/9WESkeM+t099b043VV9hERkUBRwt/MvmdmfzSzl1KOHWRmPzWzVxI/x6c890Uz22RmG81MU1xERMqsWD3/pcD8tGM3Ak+6+xTgycRjzOxY4BJgeuI13zGzaJHaISIiOShK+Lv7L4G30g6fC9yT+Ps9wHkpx+9z993u/hqwCZhdjHaIiEhuSlnzf7+7vwGQ+Pm+xPHDge6U83oSx0REpEzCGPDNtDh9xilHZna1mXWYWcfWrVtL3CwRkfpRyvD/g5kdBpD4+cfE8R5gYsp5TcDrmd7A3e9y91Z3bz3kkENK2NTKsf/++4fdBBGpA6UM/5XAosTfFwErUo5fYmb7mNlkYArQXsJ2hC4Wi4XdBBGRIYo11fNe4FngGDPrMbNPAbcBZ5nZK8BZice4+zrgfuBl4DHgWncvXzp2t8OqO4KfRdDV1cXUqVNZtGgRJ5xwAp/4xCfYtWsXzc3NfPWrX+XUU0/lgQce4NVXX2X+/Pm0tLQwb948NmzYAMBrr73GKaecwqxZs/jyl79clDaJiIykKMs7uPulWZ46I8v5twK3FuOz89LdDvcsgFgfRBth0UqYWPhEo40bN7JkyRLmzp3LVVddxXe+8x0Axo4dy69+9SsAzjjjDO68806mTJnCc889x+c+9zmeeuoprrvuOq655hquuOIKFi9eXHBbRERyUV93+HatCoLfY8HPrlVFeduJEycyd+5cAC6//PLBwL/44osB2LlzJ8888wwXXXQRJ554Ip/97Gd54403AHj66ae59NLg2vnJT36yKO0RERlJfS3s1jwv6PEne/7N84rytunLOicfv+c97wEgHo9z4IEH8sILL+T0
ehGpPp1bttG2uZc5R04o3gYv3e1BJ7V5XlGqFKnqq+c/cXZQ6jn9pqKVfAB++9vf8uyzzwJw7733cuqppw55/oADDmDy5Mk88MADALg7a9euBWDu3Lncd999AFrWWaRKFXuXQGBPmfqpW4OfRRqnTKqv8Icg8OddX9Sr6LRp07jnnns44YQTeOutt7jmmmv2OmfZsmUsWbKEGTNmMH36dFasCCY/ffOb32Tx4sXMmjWL7du3F61NIlI+mXYJLFiJytRJ9VX2KZFIJMKdd9455FhXV9eQx5MnT+axxx7b67WTJ08e/NYAcOONN5akjSJSOsldAvsH4kXbJbBUZeokhb+ISIGSuwS2be5l/H6Ngz3/gmr/yTJ1iWr+Cv8CNTc389JLL418oohUlXwHcJPnXHZ3G30DcRobIoVvFztxdtFDP6nqw9/da362TLXstiZSK5IDuPmGeKbaf9Fm/hRZVQ/4jh07lt7e3poOR3ent7eXsWPHht0Ukbox2gHcZO0/ahSv9l8iVd3zb2pqoqenh1pf8XPs2LE0NTWF3QyRujHaAdzU2n9R5/uXQFVv4C4iUioluWkrhDZk28C9qnv+IiKl0jJp/OhDvwh35o523CFXCn8RkWLqWAo/vh48DtF9Rr2aQKkHjxX+IiLF0N0Oa5dD5/eDu3IBYruDbwCjCP+S3DiWQuEvIlKoZG8/PjD0uEVGfWduqQePFf4iIqORrOvvOyFz8Eca4ON3FHSTVkHjDiNQ+IuI5Cu1rm8RiKdsRmhRaFkEMy4t2d25xaDwFxHJR3f70J6+A5HongvBx++A1ivDbGFOFP4iErpKmFOfs65VEI/veRxJBP47vSVZgK1UFP4iEqpSz2cvuuZ5xJNLLVuESJX09NNV9do+IlL9SrIRSjF0t8OqO/baQaszPoWFfX/LHQMXsXDgK3Qecm5IDSyMev4iEqp857OXpUSU3EIxuZFKyo1abZt7aR84ijY/iqhR0St3DkfhLyKhymc+e9lKRJm2UEyEf6lvvioXhb+IhG6k+ezJ3v7rb79TnvXyh9lCsZpW7hyOwl9EKlpqb78hYjREI8RiJe51j7CFYilvvioXhb+IlF0+dfvUAeFY3Ll49kQOP3Df0ve689hCsaqmqiYo/EWkrPKt26fX2C+c2VRRAVt1U1UTFP4iUlb5LlVc6TX2atq3N5XCX0TKajSzZSq5xl6ts3+0jaOIlF16jTzXmnm288KuuYf9+cPRNo4iUjFSe/K51syznVcJNfdK/maSjZZ3EJFQ5bq8Q7bz2jb3Mj22gb+MrOC42IbKWR6iwpW8529mXcAOIAYMuHurmR0E/CfQDHQBf+7u20rdFhGpPLnWzDOe193On//+3/n0mPuJEqefBrbsfzxwVHn/EVWo5DX/RPi3uvubKcf+CXjL3W8zsxuB8e7+f4d7H9X8RWrXqGr+kVeC9XcG3sVxDHCLYqffBPOuL1/jK1yl1fzPBT6c+Ps9wH8Dw4a/iNSuXGvmQ8575F4YeBcSwQ+GpS3FINmVI/wdeMLMHPh/7n4X8H53fwPA3d8ws/dleqGZXQ1cDXDEEUeUoakiUtG622Htcti5FX7zGEG8AJExMPOTFb91YiUpR/jPdffXEwH/UzPbkOsLExeKuyAo+5SqgSJS4ZKh//wPIN6f9qTBzMvhnG+E0rRMKnnqZ1LJw9/dX0/8/KOZPQTMBv5gZoclev2HAX8sdTtEpEolN0uPxxjs6Q8yaBgLMxaG0LDMKmHqaS5KOtXTzN5jZuOSfwc+ArwErAQWJU5bBKwoZTtEpEoN2Sw9LfgjY4LtE1M2WqkEFbszWZpS9/zfDzxkZsnPWu7uj5nZauB+M/sU8FvgohK3Q0RyUHHliq5V4CmbpVsUjvkY7P++iq3vV8tyDyUNf3ffDMzIcLwXOKOUny0i+SlHuSLrxaVjKaxfAdPOHboZevM8iO4Dsd1gEaiCzdIrfSG6JC3vIFLHUsO41KtTZr24dCyFR64LTnr1qeBnMuBH2FSlUlXDcg8Kf5E6lR7GXzlneknLFVkvLmu+P/TENd8f2rvPY1MVyZ3CX6ROpYfxtl19JS1XpNbCZzVs4ryda6D7IzDusKEnpj+WklD4i9SpTAOTpSxXtEwaz8MLxtD//HKm/+FHRJ6Pwdpvw/zbiP/mcSzej0fGEJl7XUk+X4ZS+IvUqdSByfH7NQ5OSSxG+Gcc2O1uZ+rjlw8uyQBArI/fvdHD9f1fpsXX0WnTuSE+hZaCWyAjUfiLFKDipkbmKdnmYs7yyTqw27UKYn3sma9vEG3k2dixtA/sQ5sfRdSomm0Qq53CX2SUquVOzpE8+HwPu/vjOMWZ5ZNcX//kyHraY9No2zwleL/meRBtDC4AkSicdDnMuJTJ8Sk0drZV/Lz4WqPwFxmlat24O1Xnlm080NE92BePRgsP3zP27+KqMf/AGAaGrq+fZdpmC1TFvPhao/AXGaVquZNzOG2bexmIB9FvwLRDxxX2ht3tTN2wGLd+DCdqMaa+uxY4M3g+y7TNapgXX2sU/iKjVC13cg4neQFLfoP5dc92Lru7bXQlrO72YHOV2G4MByJaX7+CaQ9fkQK0TBrPtacdVZXBD3suYHOPOjjYCYs8FiPrbodVdwQ/Yc+ArseDpRg+8OGKW3RN9lDPX6TOtUwaz+fPPJrVXW/lVsLqboe198KaHwTLLEcbg5BPHdCNNsKHv6jgr2AKf5E6l5yueuUpzax740987LjDsn+TGVxbf2DPsVhf0Oufd31VrsNTrxT+InUsdbpq3INB39Vdb3HMoeP2vgAMWVs/KZirP1jX1zo8VUM1f5E6ljpdFUao+WdaWz/EzVQ6t2xj8c830bllW9k/uxao5y9SxQq9wzh9tk8Estf8c1hbv1x3PNfKDXZhUviLVKliBGD6+j7bdvVlD+4R1tYvZyDXwg12YVP4i1Sp1ADs64/zLz/7DZ8/8+hRXQBaJo2Hn94ML94PXZPhzFsyl3KGqemXKpAzfZuohRvswqbwF6lS4/drJGKGuxMHnt70Jqu73sq/x92xFP77H2DnH4LHf3odvjcfrnosY9BnK+2UIpCzfZuohRvswqbwF6lCnVu28dVH1hF3xxJ3Z42qx/1fnwl6++k8FpR38ijtlCKQh/s2oSUhCqPwFymBUg98poZixCAaCb4B5Nzj7m6Hp78JGx7J/LxFMy7LMFJpp9iBrPJO6Sj8RYosW++40AtC6uvTQ/Er50wffrA2qbsd1i6HNcuDWTuZHHo8nP3PGUs+5Q7jkb5NVPt+CmFS+IsUWabeMRS2YUqmC0reJZbBu3Nj7NlQJcUwoZ/qwplNeOJnOQI327cJTfcsjMJfpMgy9Y7zmQmTqTeb6fU5LSjXsRTWr4BDTyD+zLcxH8AGnzSINMBhJ8BJV+w1Zz9Tu1LD9sKZTfn8WopO0z0Lo/AXKbJspYpcyiXZerN5l1vSavr+6lPgYAbu4BYl0roIZlya8925lRa2Gg8ojMJfpATSSxXpFwSAxT/ftFfJJlvA5jWTJrmu/sA7Qw47xoCDE2HVlBs5/Zz
/k9e/qdLCVtM9C6PwFymTZIgPV6seLmBznkkzuFH60Mr+kvg5bPf96LTp3HDqFUB+A6aVGLaa7jl6Cn+RMss2IJwM1Vy+IQxKzt7ZuRX2f19Qxkmsq++xPvrjsC4+iQc5nWnnXEf/rj5uSLzXaAZMFba1Q+EvUmbpvfvx+zXuFcLXnnbUyOHcsRQe/ZuhK22uWQZXPgKLVtL21MPcvvEQOuNHEzX4wq4+rj3tqMFTK62GL+WlJZ1FCjCaZYWT5ZMvfOQYln16Dtt29WX9JpDpOBD0+B/9wtDghz0bq0ycTeOHb2BddCpRy7xSZ/IilO15qW3q+UvdG+2NQoXMM08vn2Sq8w87wJq+tn5SysYqI9XoK7GGL+Wj8Je6VkiAj1S7z+dCkCmE9zoeeQVWJZZTHtwvN3mXrsHUs2HudUOmbo5Uo1cNv36FFv5mNh/4JhAF7nb328Jqi9SvQureudTuR/tNYPB45BVa/mc5PLsVXnli6IbpVz4Ca+9l6453+cW+ZzH5pNNomTj852k5BEkKJfzNLAosBs4CeoDVZrbS3V8Ooz1SvwqZu57eMy/6AGp3Oyw9e3Da5qCUDdM741P2XHA624a94Iz0LUcXhvoSVs9/NrDJ3TcDmNl9wLmAwl/KqtC6dy61+7x1twfhvr0bYv1pTw7dMD2fC85w52qdnPoTVvgfDnSnPO4BTk4/ycyuBq4GOOKII8rTMqk7xap7F3QhSQb+vhPgsRuD3n2kASJRiA8E50TGwMzLYcbCwbp+Pt9chjtX0z7rT1jhbxmO7bXMoLvfBdwF0NrammEZQpHKMqoLyeBqm3GIRIJZPB6HONCyiOA/Dcu4Dk8+F5zhzq20pRuk9MIK/x5gYsrjJuD1kNoiUnJZ6+nd7YngT/Tu4x709pPlnRwWXsvngpN1YFnTPutOWOG/GphiZpOB3wGXAAtDaotISWWsp0deSdT1e4bO149E4eN3wDu9QV0/xxU3R9OmTFNLFfr1I5Twd/cBM/sr4HGCqZ7fc/d1YbRFpNTaNveyuz/OSfYbTomvZ/uvXoTX/jlR148Gtfx4P1gkCP4R1tUvlAZ3BUKc5+/uPwZ+HNbnS+2o9CmK4/dr5PaGxZwffToY2HolCuYpdf0r4L0TS9rTT6XBXQHd4StVrhp6sUe/eDst0aeBYLN1JwZEg03So41DZu9kUuyLmwZ3BRT+UuUqtheb3D5x2rkct/0XQLCLFgAO2444g/FT5kDzPDrjU2jLsmzzaC9uw10wijW4W+nfuGR4Cn+pahXZi/3pzfD0vwR/f/Upxh55Or6jC09MVo4R4cmDLuUT8y4YMdxHc3HL5YJR6OBuNXzjkuFpSWepaunLI4ceQB1L9wR/0u7t/P74v+QNP4jn4lO5PH4Lk086DRhh2WZGt+zySO9ZDOX4DCkt9fyl6pVjimJOJY7knP104w7jsAv/kc7WG+nc3Du4kxaM/M1lNCWacnwbqshvXJIXc6+OG2dbW1u9o6Mj7GZIHRq2xJFS2+edXnjy74H44O3qblEiVz1W1gHd1Pccv18j23b1laQur5p/dTCzTndvTT+unr/ICLLW3TuWwiPXBSe9+hTM/Tw07IMP7Cbm8GRsJv9uC7ghPoWWYd6/FN9cku9Xyrq8bgqrbgp/kRFkLXGsXzH0xN//OuPeuWHNQKrYmVBSETTgK5JFcn9egHtOeJln9/sb1oy9hpbfJAZ0p5079AXTzs1p79xy0R69Mhz1/EUyWP7cb/nKipc4wTdyVMMjfCSyZ7zJn/6XYFnas/4uOJCs+SeWZaiURdIqpR1SmTTgK5Kmc8s2brvrHj5lP+LM6PNECRZeS96k5YAddCT89ZrwGimSIw34iuSgc8s2Xnj4G9zb8K9DQt+dwZu0MGDagtDamEozbmS0FP4iCZ1btvHw3X/PLZElRPA9PX2HOMbOhvHs2wCNrVfsKfmESHfZSiEU/lIRQu3BJubq++6JfCXyg8HgT3b04xal+4Nfo/eYhXvaWN4WZqTZPFIIhb+ELtQebMpc/RaCFZaTZR7MsKlnE517Hb3xKRXXy9ZdtlIIhb+ELpQebHLD9A2PDh4yIGKRoNIfiRA5e8/GKm0/35RXG8vxTUazeaQQCn8JXVl7sN3tsHY5rFke7JtrQ291sbl/jY09YK+NVfJpY3KaaCzu7DOmtN8SdJetjJbCX0JXlh5sdzusvRfW/ABi/eyp6ANTz4H+XUPm6o+2jZ1btvGVFS8xEA/ev69ftXipTAp/qQipPdiil0w6lgarbcYH0p6wYCetudfltH1iLr3sts29xFPunYlETLV4qUgKfymZ0YR40Qd/k8ssDwl+g+gYOOlymHFpUffNTZaH+gbiRMz46rnHqdcvFUnhLyUx2hAv+uBv16pgo/SEuEVZd+h5jJm5kKmzzhz9+2ahQVipFgp/KYnRhnhBg7/JGTypg7XN8yC6D8R2EyfCzQNXsqzrdBp7+ln2vm0lCWcNwko1UPhLSYw2xEfVc04dzI3Hgjr+opXBBWDi7ODvXat4sHcyy57bRzdFiaDwlxIppPyRV8+5ux3uWQAD7zI4gyfWF3wDSPb+J86mMz6FNW/20BDpJhb3YS9IWi9H6oHCX0qmLOWPrlVB2JOy6lq0MSj3JKSOPzREI1w8eyIXzmzK2Lbhxip0UZBaovCX6pJe12+eF4R9rA8iDXDSQpixcMgMntTxh1gszuEH7ps1vLONVXRu2cal320bLGPd+5nwl3cQKYTCXwpS1t5wssQT6xta10/U9NPvyk3KZ/wh27kPPt9D30Awa6hvIM6Dz/co/KWqKfxl1Aqdk5/3hSNZ4vHY0Lp+8k8WI40/pLcj07npWx5VxxZIItkp/GXUCpmTP6oLR2qJJ62uP5Js4w/Z2pF+7oUzm/hhRzf9MWdM1LhwZlPOny1SiRT+MmqFzMkf9sKRab4+5FTiyVeuF7CWSeO59+pTNOArNUPhL6NWyHTOrBeObHX9pBFKPPnK5wKmm7eklpQs/M3sFuAzwNbEob919x8nnvsi8CkgBvy1uz9eqnZIaY02EPe6cERegVWrYHtP5rp+AYYbW9ByDFKvSt3z/4a73556wMyOBS4BpgN/BvzMzI5291iJ2yIVpiXyCi0Nq2DrBHjsxsR0zWgwZTNOxrp+voPE2Wr66e+j0Jd6E0bZ51zgPnffDbxmZpuA2cCzIbRFwpJa3jGDeByIB6HfcgW8d+Jedf3RDBJnqukDZd+SUTeISaUpdfj/lZldAXQA17v7NuBwoC3lnJ7EMakHycHc7d17yjtEIBIBT9ydm3aTVtJoZhdlqumXe9vIUPcoFsmioPA3s58Bh2Z46ibg34CvEUyJ/hpwB3AVwVap6TJOmzazq4GrAY444ohCmiqVILW3H2kISjzJ8s782+Cd3mFn8YxmdlG2mn45Nz4PZY9ikREUFP7untOC6Gb2XeCRxMMeYGLK003A61ne/y7gLoDW1lbdV1OtBnv7KYO5caBlEby3Kedpm6MdnE2v6Zd7kLesexSL5MjcS5OpZnaYu7+R+PvfACe7+yVmNh1YTl
Dn/zPgSWDKSAO+ra2t3tHRUZK2SomkL7UciQIW7KqVaRpnDVPNX8JiZp3u3pp+vJQ1/38ysxMJSjpdwGcB3H2dmd0PvAwMANdqpk8NyrTU8jCDubVOM4qk0pQs/N39k8M8dytwa6k+WypAtqWWswzmikh56Q5fKY0clloWkfAo/KUwZVyHR0SKR+Evo1fmdXhGokFVkdwp/GtEKMGXbX39EOhGKpH8KPxrQFmCL1N5ZxTr65fqIqUbqUTyo/CvASUPvo6l8OPrg/V3GvbZU97Js65fyouUbqQSyY/CvwaULPi622Htcuj8fmINHmBg99DyTh51/VJepLQ0s0h+FP41oCTB17GU+KPXgw9gBAsyOeAWIZLH9ompSt07141UIrlT+NeIogZfdzvxR6/H4gOYgXtwc26MKF+NXcl58Sm0jLKN6p2LVAaFv+zldy88waHxGJFE8MeIcF/sNB6MzWMtR3NoAeUa9c5FKoPCX/bybOxYzmYMY7wfJ8LNA1fyQzuLGBpMFakVCn/Zy+STTuMvOr9Ei6+j3Y/l/AUXcOGh41SuEakhJVvSudi0pHN5FXM+vu68FQlPGEs6SxUrVm1ed96KVKZI2A2Q2pZtA3URCZfCX0oqObc/amiwWKSCqOxTi7ItsxwCze0XqUwK/1oz0jLLIdDcfpHKo/CvFcne/vbuillmWUQql8K/FqT29iMNEIkG6zHkuMzySDRVU6T2KPxrQeqmKnGgZRG8t6koNX9N1RSpTZrtUxKlioUAAAZFSURBVAuSm6pYFKKNbHj/2SweOJfO+JSC31pTNUVqk3r+tSBlU5UNY2dw3sp++gY2FqWnrk1SRGqTwr+CZKyt5zptM7GpypM/30TfwMaibZiiqZoitUnhXyFSa+sN0QifaGniiqY/cPRjlw1O24xc+aMRa/il6KlrqqZI7VH4V4jU2vpxsQ0c2PEQzz//JkdF+miwOAMDfbzxwhMcPkL4q6cuIrlQ+FeIOUdOYHbDJs7xX3BR9BdEiRMjQowIOPTTwLOxY/lEDu+lnrqIjEThXyFaIq+wvPEfILYbc8cSm+beHz+d3/kEOm06N5x0WtjNFJEaofCvFF2riMT7AccN4hiRhkZmzr+GbTubuUElHBEpIoV/pUjO1Y/1YZEG7KSFMGMhUyfOZmrYbRORmqPwrxQpc/UrYTVOEaltCv9KkpirLyJSagUt72BmF5nZOjOLm1lr2nNfNLNNZrbRzD6acrzFzF5MPPctM7NC2iAiIvkrdG2fl4ALgF+mHjSzY4FLgOnAfOA7ZhZNPP1vwNXAlMSf+QW2QURE8lRQ+Lv7enffmOGpc4H73H23u78GbAJmm9lhwAHu/qy7O/B94LxC2iAiIvkr1aqehwPdKY97EscOT/w9/XhGZna1mXWYWcfWrVtL0lARkXo04oCvmf0MODTDUze5+4psL8twzIc5npG73wXcBdDa2pr1PBERyc+I4e/uZ47ifXuAiSmPm4DXE8ebMhwXEZEyKlXZZyVwiZntY2aTCQZ22939DWCHmc1JzPK5Asj27UFEREqk0Kme55tZD3AK8KiZPQ7g7uuA+4GXgceAa909lnjZNcDdBIPArwI/KaQNIiKSPwsm3VS+1tZW7+joCLsZIiJVxcw63b01/bj28BURqUMKfxGROqTwFxGpQwp/EZE6pPAXEalDCn8RkTqk8BcRqUMKfxGROqTwFxGpQ7Uf/t3tsOqO4KeIiAC1vodvdzvcswBifRBtDDZI1x65IiI13vPvWhUEv8eCn12rwm6RiEhFqO3wb54X9PgtGvxsnhd2i0REKkJtl30mzg5KPV2rguBXyUdEBKj18Ac641NoGziIOfEJtITdGBGRClHT4d+5ZRuX3d1G30CcxoYIyz49h5ZJ48NulohI6Gq65t+2uZe+gThxh/6BOG2be8NukohIRajp8J9z5AQaGyJEDcY0RJhz5ISwmyQiUhFquuzTMmk8yz49h7bNvcw5coJKPiIiCTUd/hBcABT6IiJD1XTZR0REMlP4i4jUIYW/iEgdUviLiNQhhb+ISB1S+IuI1CFz97DbkBMz2wpsCbsdJXYw8GbYjagg+n0Mpd/HUPp9DJXt9zHJ3Q9JP1g14V8PzKzD3VvDbkel0O9jKP0+htLvY6h8fx8q+4iI1CGFv4hIHVL4V5a7wm5AhdHvYyj9PobS72OovH4fqvmLiNQh9fxFROqQwl9EpA4p/CuMmX3dzDaY2a/N7CEzOzDsNoXJzC4ys3VmFjezup3WZ2bzzWyjmW0ysxvDbk+YzOx7ZvZHM3sp7LZUAjObaGY/N7P1if9WrsvldQr/yvNT4Dh3PwH4DfDFkNsTtpeAC4Bfht2QsJhZFFgMfAw4FrjUzI4Nt1WhWgrMD7sRFWQAuN7dpwFzgGtz+f+Hwr/CuPsT7j6QeNgGNIXZnrC5+3p33xh2O0I2G9jk7pvdvQ+4Dzg35DaFxt1/CbwVdjsqhbu/4e7PJ/6+A1gPHD7S6xT+le0q4CdhN0JCdzjQnfK4hxz+45b6Y2bNwEnAcyOdW/PbOFYiM/sZcGiGp25y9xWJc24i+Dq3rJxtC0Muv486ZxmOaY62DGFm+wP/BXze3f800vkK/xC4+5nDPW9mi4BzgDO8Dm7EGOn3IfQAE1MeNwGvh9QWqUBmNoYg+Je5+4O5vEZlnwpjZvOB/wsscPddYbdHKsJqYIqZTTazRuASYGXIbZIKYWYGLAHWu/s/5/o6hX/l+TYwDvipmb1gZneG3aAwmdn5ZtYDnAI8amaPh92mcktMAPgr4HGCwbz73X1duK0Kj5ndCzwLHGNmPWb2qbDbFLK5wCeB0xOZ8YKZfXykF2l5BxGROqSev4hIHVL4i4jUIYW/iEgdUviLiNQhhb+ISB1S+IuI1CGFv4hIHfr/5P5imyOW8LcAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ "# Vis\n", "fig, ax = plt.subplots()\n", - "ax.plot(X.cpu().numpy(), y_.cpu().numpy(), \".\", label=\"pred\")\n", "ax.plot(X.cpu().numpy(), y.cpu().numpy(), \".\", label=\"data\")\n", - "ax.set_title(f\"MSE: {loss.item():0.1f}\")\n", + "ax.plot(X.cpu().numpy(), y_hat.cpu().numpy(), \".\", label=\"pred\")\n", + "ax.set_title(f\"MSE: {loss_history[-1]:0.1f}\")\n", "ax.legend();" ] }, @@ -168,7 +537,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "- Write a proper training loop." + "- Write a proper training loop for PyTorch." ] } ], @@ -189,7 +558,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/03-modules-and-mlps.ipynb b/03-modules-and-mlps.ipynb index b7415f9..ae91458 100644 --- a/03-modules-and-mlps.ipynb +++ b/03-modules-and-mlps.ipynb @@ -859,7 +859,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.8.5" } }, "nbformat": 4, From 803bdf31d575ae4a72b22259218b5b2c0b6d1f53 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Tue, 13 Oct 2020 07:09:01 -0300 Subject: [PATCH 04/18] Clear outputs --- 01-pytorch-basics.ipynb | 1155 +++++------------------------------- 02-linear-regression.ipynb | 187 ++---- 2 files changed, 177 insertions(+), 1165 deletions(-) diff --git a/01-pytorch-basics.ipynb b/01-pytorch-basics.ipynb index d5f6815..573c3c4 100644 --- a/01-pytorch-basics.ipynb +++ b/01-pytorch-basics.ipynb @@ -28,20 +28,9 @@ }, { "cell_type": "code", - "execution_count": 45, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 45, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "import numpy as np\n", "import torch\n", @@ -52,21 +41,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "v1: [0 1 2 3 4 5 6 7 8 9]\n", - "\n", - "v2: [10 11 12 13 14 15 16 17 18 19]\n", - "\n", - "Dot product: 735\n" - ] - } - ], + "outputs": [], "source": [ "v1 = np.arange(10)\n", "v2 = np.arange(10, 20)\n", @@ -78,21 +55,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "v1: tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])\n", - "\n", - "v2: tensor([10, 11, 12, 13, 14, 15, 16, 17, 18, 19])\n", - "\n", - "Dot product: 735\n" - ] - } - ], + "outputs": [], "source": [ "v1 = torch.arange(10)\n", "v2 = torch.arange(10, 20)\n", @@ -111,23 +76,11 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "v3: [2 4 6 8]\n", - "\n", - "v4: [0.72639872 0.73935424 0.68015035 0.47352003 0.45043365 0.71525855\n", - " 0.18119405 0.1819192 0.9227315 0.67426907]\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "v3 = np.array([2, 4, 6, 8])\n", "v4 = np.random.random(10)\n", @@ -138,21 +91,9 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "v3: tensor([2, 4, 6, 8])\n", - "\n", - "v4: 
tensor([0.8405, 0.6914, 0.0403, 0.7377, 0.3544, 0.5140, 0.4915, 0.1579, 0.4751,\n", - " 0.7653])\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "v3 = torch.tensor([2, 4, 6, 8])\n", "v4 = torch.rand(10)\n", @@ -170,18 +111,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([0.8405, 0.1000, 0.0403, 0.7377, 0.3544, 0.5140, 0.4915, 0.1579, 0.4751,\n", - " 0.7653])\n" - ] - } - ], + "outputs": [], "source": [ "v4[1] = 0.1\n", "print(v4)" @@ -203,7 +135,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -212,18 +144,9 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(0)\n", - "torch.Size([])\n" - ] - } - ], + "outputs": [], "source": [ "print(v1[0])\n", "print(v1[0].shape)" @@ -238,18 +161,9 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0\n", - "True\n" - ] - } - ], + "outputs": [], "source": [ "number = v1[0].item()\n", "print(number)\n", @@ -265,22 +179,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[1., 0., 0.],\n", - " [0., 1., 0.],\n", - " [0., 0., 1.]])" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "A = torch.eye(3)\n", "A" @@ -288,22 +189,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[1., 0., 0.],\n", - " [0., 1., 0.],\n", - " [0., 0., 1.]], dtype=float32)" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# torch --> numpy\n", "B = A.numpy()\n", @@ -312,22 +200,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[1., 0., 0.],\n", - " [0., 1., 0.],\n", - " [0., 0., 1.]], dtype=torch.float64)" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# numpy --> torch\n", "torch.from_numpy(np.eye(3))" @@ -342,80 +217,36 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v1" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([10, 11, 12, 13, 14, 15, 16, 17, 18, 19])" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v2" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([10, 12, 14, 16, 18, 20, 22, 24, 26, 28])" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ 
"v1 + v2" ] }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([ 0, 11, 24, 39, 56, 75, 96, 119, 144, 171])" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v1 * v2" ] @@ -429,42 +260,18 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "RuntimeError", - "evalue": "Integer division of tensors using div or / is no longer supported, and in a future release div will perform true division as in Python 3. Use true_divide or floor_divide (// in Python) instead.", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mv1\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0mv2\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m: Integer division of tensors using div or / is no longer supported, and in a future release div will perform true division as in Python 3. Use true_divide or floor_divide (// in Python) instead." - ] - } - ], + "outputs": [], "source": [ "v1 / v2 " ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([0.0000, 0.0909, 0.1667, 0.2308, 0.2857, 0.3333, 0.3750, 0.4118, 0.4444,\n", - " 0.4737])" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "x = v1.float()\n", "y = v2.float()\n", @@ -480,60 +287,27 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([0., 1., 2., 3., 4., 5., 6., 7., 8., 9.])" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "x" ] }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([ 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "x + 1" ] }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([ 0., 1., 4., 9., 16., 25., 36., 49., 64., 81.])" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "x ** 2" ] @@ -547,38 +321,9 @@ }, { "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "m1: tensor([[0.7911, 0.7790, 0.3392, 0.1068],\n", - " [0.8816, 0.0246, 0.8745, 0.9065],\n", - " [0.5135, 0.7818, 0.1493, 0.4424],\n", - " [0.0682, 0.0821, 0.3905, 0.4508],\n", - " [0.6486, 0.0946, 0.3196, 0.8485]])\n", - "\n", - "m2: tensor([[0.2707, 0.4952, 0.7372, 0.4750, 0.1376],\n", - " [0.8742, 0.8034, 0.6029, 0.8402, 0.8754],\n", - " [0.3132, 0.6785, 0.7391, 0.8908, 0.3854],\n", - " [0.1835, 0.9370, 0.7891, 0.0467, 0.4983]])\n", - "\n" - ] - }, - { - "ename": "RuntimeError", - "evalue": "1D tensors 
expected, got 2D, 2D tensors at ../aten/src/TH/generic/THTensorEvenMoreMath.cpp:83", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"m1: %s\\n\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mm1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"m2: %s\\n\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mm2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm1\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m: 1D tensors expected, got 2D, 2D tensors at ../aten/src/TH/generic/THTensorEvenMoreMath.cpp:83" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "m1 = torch.rand(5, 4)\n", "m2 = torch.rand(4, 5)\n", @@ -597,44 +342,20 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[1.0210, 1.3478, 1.3878, 1.3374, 0.9747],\n", - " [0.7003, 1.8990, 2.0262, 1.2607, 0.9315],\n", - " [0.9505, 1.3983, 1.3094, 1.0544, 1.0331],\n", - " [0.2952, 0.7871, 0.7441, 0.4703, 0.4564],\n", - " [0.5140, 1.4091, 1.4409, 0.7118, 0.7180]])\n" - ] - } - ], + "outputs": [], "source": [ "print(m1.mm(m2))" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[1.0210, 1.3478, 1.3878, 1.3374, 0.9747],\n", - " [0.7003, 1.8990, 2.0262, 1.2607, 0.9315],\n", - " [0.9505, 1.3983, 1.3094, 1.0544, 1.0331],\n", - " [0.2952, 0.7871, 0.7441, 0.4703, 0.4564],\n", - " [0.5140, 1.4091, 1.4409, 0.7118, 0.7180]])\n" - ] - } - ], + "outputs": [], "source": [ "print(m1 @ m2)" ] @@ -648,27 +369,9 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[[1.3949, 1.0731, 1.3313, 0.8972, 1.3460],\n", - " [1.1715, 0.9119, 1.2318, 0.5970, 1.1741],\n", - " [0.6570, 0.4586, 0.6742, 0.7064, 0.8182],\n", - " [0.7295, 0.5646, 0.8505, 1.0040, 1.1814],\n", - " [1.0063, 0.5420, 0.8679, 0.7967, 0.8066]],\n", - "\n", - " [[0.5311, 0.6051, 0.5187, 0.2731, 0.2563],\n", - " [0.8225, 0.7501, 0.7612, 0.4548, 0.3712],\n", - " [0.9531, 0.5223, 0.5775, 0.3952, 0.3753],\n", - " [1.1869, 0.7789, 0.7322, 0.5213, 0.5131],\n", - " [0.5715, 0.3806, 0.2838, 0.2202, 0.2585]]])\n" - ] - } - ], + "outputs": [], "source": [ "m1 = torch.rand(2, 5, 4)\n", "m2 = torch.rand(2, 4, 5)\n", @@ -685,29 +388,11 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[[1.3949, 1.0731, 1.3313, 0.8972, 1.3460],\n", - " [1.1715, 0.9119, 1.2318, 0.5970, 1.1741],\n", - " [0.6570, 0.4586, 
0.6742, 0.7064, 0.8182],\n", - " [0.7295, 0.5646, 0.8505, 1.0040, 1.1814],\n", - " [1.0063, 0.5420, 0.8679, 0.7967, 0.8066]],\n", - "\n", - " [[0.5311, 0.6051, 0.5187, 0.2731, 0.2563],\n", - " [0.8225, 0.7501, 0.7612, 0.4548, 0.3712],\n", - " [0.9531, 0.5223, 0.5775, 0.3952, 0.3753],\n", - " [1.1869, 0.7789, 0.7322, 0.5213, 0.5131],\n", - " [0.5715, 0.3806, 0.2838, 0.2202, 0.2585]]])\n" - ] - } - ], + "outputs": [], "source": [ "print(m1 @ m2)" ] @@ -721,21 +406,9 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "RuntimeError", - "evalue": "Expected 3-dimensional tensor, but got 4-dimensional tensor for argument #1 'batch1' (while checking arguments for bmm)", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mm2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrand\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm1\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbmm\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m: Expected 3-dimensional tensor, but got 4-dimensional tensor for argument #1 'batch1' (while checking arguments for bmm)" - ] - } - ], + "outputs": [], "source": [ "m1 = torch.rand(2, 3, 5, 4)\n", "m2 = torch.rand(2, 3, 4, 5)\n", @@ -752,55 +425,11 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([2, 3, 5, 5])\n", - "tensor([[[[1.5123, 1.6268, 1.8535, 2.1311, 1.3917],\n", - " [1.0503, 0.6726, 1.0155, 1.1335, 0.8117],\n", - " [0.6293, 0.2779, 0.4590, 0.5317, 0.4186],\n", - " [1.0184, 1.3185, 1.3476, 1.5560, 1.0910],\n", - " [0.5591, 0.5152, 0.7758, 0.8843, 0.3094]],\n", - "\n", - " [[1.2068, 1.1475, 1.1560, 1.0750, 1.2943],\n", - " [0.3632, 0.5753, 0.4025, 0.2080, 0.4787],\n", - " [0.9171, 0.6348, 0.6151, 0.6738, 0.9318],\n", - " [0.6193, 1.8378, 1.1507, 0.9540, 1.1438],\n", - " [0.7270, 1.4191, 1.0204, 1.2311, 1.0825]],\n", - "\n", - " [[1.4094, 1.1296, 0.6805, 1.5907, 1.6366],\n", - " [0.4048, 0.2806, 0.2585, 0.4226, 0.2137],\n", - " [1.1688, 0.9231, 0.6348, 1.2367, 0.9975],\n", - " [0.8251, 0.5466, 0.5144, 1.2760, 0.8789],\n", - " [0.8575, 0.6042, 0.4822, 1.4390, 1.2007]]],\n", - "\n", - "\n", - " [[[0.7810, 0.5242, 0.5863, 1.0640, 0.7554],\n", - " [0.7935, 0.1950, 0.5598, 0.4831, 0.3593],\n", - " [1.6333, 0.8133, 1.2474, 1.7215, 1.1238],\n", - " [0.8468, 0.6065, 0.2264, 1.4667, 0.7630],\n", - " [1.3928, 0.6515, 0.8458, 1.4605, 1.1252]],\n", - "\n", - " [[1.1226, 1.4591, 1.3862, 0.8904, 0.9946],\n", - " [1.1494, 1.1454, 1.4351, 0.6208, 0.6796],\n", - " [1.1410, 1.4001, 1.5988, 1.2281, 0.9615],\n", - " [0.9949, 1.1179, 1.3078, 0.3292, 0.6981],\n", - " [1.1931, 1.3291, 
1.6020, 0.9667, 0.8592]],\n", - "\n", - " [[1.2809, 0.4417, 1.1952, 0.6683, 0.9444],\n", - " [1.3687, 0.5593, 1.0666, 0.8061, 1.0604],\n", - " [1.8253, 0.7350, 1.5180, 1.0972, 1.5916],\n", - " [1.4248, 0.5172, 0.7330, 0.7102, 1.0248],\n", - " [1.2086, 0.2634, 0.8638, 0.4023, 0.7880]]]])\n" - ] - } - ], + "outputs": [], "source": [ "print(m1.matmul(m2).shape)\n", "print(m1.matmul(m2))" @@ -820,57 +449,11 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[[[1.5123, 1.6268, 1.8535, 2.1311, 1.3917],\n", - " [1.0503, 0.6726, 1.0155, 1.1335, 0.8117],\n", - " [0.6293, 0.2779, 0.4590, 0.5317, 0.4186],\n", - " [1.0184, 1.3185, 1.3476, 1.5560, 1.0910],\n", - " [0.5591, 0.5152, 0.7758, 0.8843, 0.3094]],\n", - "\n", - " [[1.2068, 1.1475, 1.1560, 1.0750, 1.2943],\n", - " [0.3632, 0.5753, 0.4025, 0.2080, 0.4787],\n", - " [0.9171, 0.6348, 0.6151, 0.6738, 0.9318],\n", - " [0.6193, 1.8378, 1.1507, 0.9540, 1.1438],\n", - " [0.7270, 1.4191, 1.0204, 1.2311, 1.0825]],\n", - "\n", - " [[1.4094, 1.1296, 0.6805, 1.5907, 1.6366],\n", - " [0.4048, 0.2806, 0.2585, 0.4226, 0.2137],\n", - " [1.1688, 0.9231, 0.6348, 1.2367, 0.9975],\n", - " [0.8251, 0.5466, 0.5144, 1.2760, 0.8789],\n", - " [0.8575, 0.6042, 0.4822, 1.4390, 1.2007]]],\n", - "\n", - "\n", - " [[[0.7810, 0.5242, 0.5863, 1.0640, 0.7554],\n", - " [0.7935, 0.1950, 0.5598, 0.4831, 0.3593],\n", - " [1.6333, 0.8133, 1.2474, 1.7215, 1.1238],\n", - " [0.8468, 0.6065, 0.2264, 1.4667, 0.7630],\n", - " [1.3928, 0.6515, 0.8458, 1.4605, 1.1252]],\n", - "\n", - " [[1.1226, 1.4591, 1.3862, 0.8904, 0.9946],\n", - " [1.1494, 1.1454, 1.4351, 0.6208, 0.6796],\n", - " [1.1410, 1.4001, 1.5988, 1.2281, 0.9615],\n", - " [0.9949, 1.1179, 1.3078, 0.3292, 0.6981],\n", - " [1.1931, 1.3291, 1.6020, 0.9667, 0.8592]],\n", - "\n", - " [[1.2809, 0.4417, 1.1952, 0.6683, 0.9444],\n", - " [1.3687, 0.5593, 1.0666, 0.8061, 1.0604],\n", - " [1.8253, 0.7350, 1.5180, 1.0972, 1.5916],\n", - " [1.4248, 0.5172, 0.7330, 0.7102, 1.0248],\n", - " [1.2086, 0.2634, 0.8638, 0.4023, 0.7880]]]])" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "torch.einsum('bcid,bcdj->bcij', m1, m2)" ] @@ -895,7 +478,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -905,24 +488,9 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "m: tensor([[0.4963, 0.7682, 0.0885, 0.1320],\n", - " [0.3074, 0.6341, 0.4901, 0.8964],\n", - " [0.4556, 0.6323, 0.3489, 0.4017],\n", - " [0.0223, 0.1689, 0.2939, 0.5185],\n", - " [0.6977, 0.8000, 0.1610, 0.2823]])\n", - "\n", - "v: tensor([0, 1, 2, 3])\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "print(\"m:\", m)\n", "print()\n", @@ -932,22 +500,9 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "m + v:\n", - " tensor([[0.4963, 1.7682, 2.0885, 3.1320],\n", - " [0.3074, 1.6341, 2.4901, 3.8964],\n", - " [0.4556, 1.6323, 2.3489, 3.4017],\n", - " [0.0223, 1.1689, 2.2939, 3.5185],\n", - " [0.6977, 1.8000, 2.1610, 3.2823]])\n" - ] - } - ], + "outputs": [], "source": [ "m_plus_v = m + v\n", "print(\"m + v:\\n\", m_plus_v)" @@ -962,23 +517,9 @@ }, { "cell_type": "code", 
- "execution_count": 50, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "m[0] = tensor([0.4963, 0.7682, 0.0885, 0.1320])\n", - "\n", - "v = tensor([0, 1, 2, 3])\n", - "\n", - "m[0] + v = tensor([0.4963, 1.7682, 2.0885, 3.1320])\n", - "\n", - "(m + v)[0] = tensor([0.4963, 1.7682, 2.0885, 3.1320])\n" - ] - } - ], + "outputs": [], "source": [ "print(\"m[0] = %s\\n\" % m[0])\n", "print(\"v = %s\\n\" % v)\n", @@ -997,61 +538,27 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "torch.Size([4])" - ] - }, - "execution_count": 51, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v.shape" ] }, { "cell_type": "code", - "execution_count": 52, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([0, 1, 2, 3])" - ] - }, - "execution_count": 52, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v" ] }, { "cell_type": "code", - "execution_count": 53, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[0, 1],\n", - " [2, 3]])" - ] - }, - "execution_count": 53, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v = v.view(2, 2)\n", "v" @@ -1059,23 +566,9 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[0],\n", - " [1],\n", - " [2],\n", - " [3]])" - ] - }, - "execution_count": 54, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v = v.view(4, 1)\n", "v" @@ -1090,21 +583,9 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "RuntimeError", - "evalue": "The size of tensor a (5) must match the size of tensor b (4) at non-singleton dimension 0", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mm\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (5) must match the size of tensor b (4) at non-singleton dimension 0" - ] - } - ], + "outputs": [], "source": [ "m + v" ] @@ -1118,24 +599,9 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[0.4963, 1.7682, 2.0885, 3.1320],\n", - " [0.3074, 1.6341, 2.4901, 3.8964],\n", - " [0.4556, 1.6323, 2.3489, 3.4017],\n", - " [0.0223, 1.1689, 2.2939, 3.5185],\n", - " [0.6977, 1.8000, 2.1610, 3.2823]])" - ] - }, - "execution_count": 56, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "v = v.view(1, 4)\n", "m + v" @@ -1163,7 +629,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1175,21 +641,9 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "RuntimeError", - "evalue": "The size 
of tensor a (0) must match the size of tensor b (2) at non-singleton dimension 1", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrand\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrand\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mz\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0;31m# x and y are not broadcastable, because x does not have at least 1 dimension\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (0) must match the size of tensor b (2) at non-singleton dimension 1" - ] - } - ], + "outputs": [], "source": [ "x = torch.rand((0,))\n", "y = torch.rand(2,2)\n", @@ -1199,7 +653,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1216,21 +670,9 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "RuntimeError", - "evalue": "The size of tensor a (2) must match the size of tensor b (3) at non-singleton dimension 1", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mempty\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mempty\u001b[0m\u001b[0;34m(\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mz\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;31m# x and y are not broadcastable, because in the 3rd trailing dimension 2 != 3\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (2) must match the size of tensor b (3) at non-singleton dimension 1" - ] - } 
- ], + "outputs": [], "source": [ "# but:\n", "x = torch.empty(5,2,4,1)\n", @@ -1259,7 +701,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1278,7 +720,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1287,20 +729,9 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "torch.Size([200])" - ] - }, - "execution_count": 73, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "x.shape" ] @@ -1314,32 +745,9 @@ }, { "cell_type": "code", - "execution_count": 74, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 74, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO29eXSkV3Xo+9tVmlWluUpjj5K6Wz233e42NhgbD7FNwEAG7JuAc0Oen19gLchL7orzeI+wknfvJcnKsEgILCfhQSYIJCbuCwZjDNhgPHW3e1S3xh6k1lSaVZpLdd4f9X1yoVZ1S6r6hqo6v7Vqqeobt3bVd/Y5Z++ztyil0Gg0Gk324nFaAI1Go9E4izYEGo1Gk+VoQ6DRaDRZjjYEGo1Gk+VoQ6DRaDRZTo7TAmyEqqoqtXXrVqfF0Gg0mrTixIkTw0qpwMrtaWkItm7dyvHjx50WQ6PRaNIKEbmy2nY9NaTRaDRZjjYEGo1Gk+VoQ6DRaDRZjjYEGo1Gk+VoQ6DRaDRZTkoMgYh8WUSGRORcgv0iIp8XkU4ROSMit8Tte1BE2ox9T6VCHo1Go9GsnVSNCL4CPHiD/Q8BzcbrCeCLACLiBb5g7N8NPCYiu1Mkk0aj0WjWQErWESilXhaRrTc45BHgH1Us5/VrIlImIrXAVqBTKdUNICJfN45tTYVcmtQyNDnHq90jTM4u8vC+Wip9+U6LlFUMh+d5rXuE0NQ897VUs6miyGmRsorp+Qg/uDBIeD7CwU1l7KkrdVqklGHXgrJ6oCfuc6+xbbXtR1e7gIg8QWw0webNm62RUpOQb5/p4//8xmkWIlEA/ujbrTz1UAsfe+c2hyXLDl5uD/HJr7/F2MwiAP/juQt87J3b+f0HdyIiDkuX+bxxaZTf/eYpekZnARCBj9y+hU+/t4X8HK/D0iWPXYZgtV+qusH26zcq9TTwNMDhw4d1NR0b+ebxHv7bv5/h8JZyPvv+PeR6PfzZ82388bdbyfUKH33HVqdFzGhebg/xG//fGzQH/fz947dRUZzHX/+wgy+91EVhrpdP3tfstIgZzcWBST765depLingnz92lC2VRfzDTy/xlZ9dZnFJ8T8/tM9pEZPGLkPQC2yK+9wA9AF5CbZrXELP6Ax/eOw8dzRW8uXfuI2C3Fjv54u/fgv/xz+f4A+PnefQpnL2NWTOMNlNjE4v8LvfPE1T0Mczv30HxfmxR/bPf+UAAH/5g3Zaav08sKfGSTEzlqm5RX77n09SUpDLN598B0F/AQCfff8eCvO8fPHHXRxoKOXRI+k9S2FX+Ogx4KNG9NDtwIRSqh94E2gWkW0ikgc8ahyrcQFKKZ565gweEf7sVw4sGwGAXK+Hv/jwQSqL8/jMsXNEo3qQZgV/eOw8EzOL/NWHDy0bAQAR4XMf2s/Oaj//73cuMB9ZclDKzOVvftjJ5ZFp/vqxQ8tGwOT3HtjJHY2V/PfnLjA+s+CQhKkhVeGjXwNeBXaKSK+IfExEnhSRJ41DngO6gU7g74DfBlBKRYBPAM8DF4BvKKXOp0ImTfL8uD3EK50j/P6DO6kvK7xuf0lBLk891MJbV8d55q1rDkiY2bQNTPG/TvfxxF3b2V1Xct3+vBwPn35vC1dHZ/jHn62aS0yTBEOTc3z11ct84FA9R7dXXrff6xE+877dhOcjfPGlLvsFTCGpihp67Cb7FfDxBPueI2YoNC7jiz/uora0gA/flnjY+6FD9XzlZ5f44o87+dChejwe7bhMFX/9ww6K87z81rsSO+Tv2hHg3TsC/M2POvn127dQmJf+jku38IUfdRJZUnzy3sQ+mF01JXzwYD1feeUyv3nnNqpLChIe62b0ymLNqpy4MsYbl0b5rXdtJy8n8c/E4xF+653b6QpN83JHyEYJM5vuUJjvnO3n8Tu2UlaUd8Njf/vuRiZmFzl2Wo/KUsX4zAJfe7OHX761gS2VxTc89pP3NbOwFOVfX79qk3SpRxsCzap8+ZVLlBbm8uhtm2567MP7agn68/nyK5etFyxL+NobV/GK8F/vvHl47pFtFeys9vOPr14hNvjWJMszJ6+xEImuKSJuS2UxdzUH+Lc3e4gsRa0XzgK0IdBcx8TsIi+0DvLBQ/U/56BMRF6Oh1+/fQsvt4e4MjJtg4SZzeJSlG+91cd7dgUJ+G++aE9E+Mg7tnC+b5KTV8dtkDCzUUrxb2/2cKChdFXfzGr8l6ObGZic40dt6Tkq1oZAcx3Pne1nIRLlQ7fUr/mcX7q1AYD/dVpH/ybLS20hhsPz/Mrhm4/GTD54qJ6iPC//cbLXQsmyg7d6xmkbnFpXSOh7dgUJ+vP52hvpOT2kDYHmOp452UtT0Me++rWvDagvK+S2reUc04Ygaf79RC9Vvjzu3nldadmEFOfncG9LNd87N5C20xNu4dm3rlGQ6+F9B+rWfE6u18MHD9XzcnuICWP1dzqhDYHm5+gdm+HNy2N88FD9ulMXvP9AHe2DYS4OTFokXeYzsxDhR21D/OL+OnK963s837uvltHpBV7rHrVIuswnGlU8f36Qd+8I4FvDtGg8D+2rJRJV/ODCoEXSWY
c2BJqf44XW2I/4vftq133uw/tq8XqEY6f0qGCjvNw+zHwkygN7qtd97t07AxTnefnOWa3/jXK6d5yByTke3Lv+ldoHGkqpKy3gu+cGLJDMWrQh0PwcL14YoinoY2vVjUPmVqPSl8/RbRW8eGHIAsmyg++3DlBamMuRrRXrPrcg17s8PbSkV3pviO+dHyDHI7xn1/oNsYjwC3treLkjRHg+YoF01qENgWaZyblFXr80wr0twQ1f456dQdoGp7g2PptCybKDyFKUFy8McW9LkJx1TguZ3L+7mrGZRU716Oih9aKU4vlzA9zRVEVpYe6GrvHQ3loWIlF+3JZenSFtCDTLvNweYnFJcX/L+ntDJvfsijk40+1BcANvXh5jYnaRB3ZvXP/vaq7CI/BSe3qGMTrJ5ZEZLo/McH8SHaFbNpdRUpDDy2mmf20INMv88MIQFcV5HNpcvuFrNAZ8bKoo5EcX0+tBcAMvd4TI8QjvbF57tNBKyoryOLipjJe0IV43PzFWxt+1Y+P6z/F6eGdzFT/pGE6rxX3aEGiA2LD4J53DvLOpCm8S+YJEhHt2Bnmlc1hnxFwnr3QOc8vm8nVHq6zk7p1BzlybYCQ8nyLJsoOX24fZVFF405QSN+NdzQH6J+boHAqnSDLr0YZAA0BXKExoap47m67Psrhe3r0jwOziEieujKVAsuxgfGaBs9cmuLOpKulrvXtHAKXgJx3DKZAsO1hcivJq1zDvSmI0ZmKOKF5OI/1rQ6AB4JXOEQDuaEy+ITqyrQKPoOPZ18GrXSMoBe9sTt4Q76svpbwol1c606chcpq3ro4zvbDEXc3J//7rywppDBSnlZ9AGwINAD/rig2LU1EQ3V+Qy776Ul7rGkmBZNnBTzuH8eXnsL+hLOlreTzCbVsreP2SNsRr5aedw3gE3rE9eUMA8M6mKt64NMpimqzy1oZAw1JU8WrXCHek6CEAuH17Jad6xpld0H6CtfCzrhGObqtY92riRBzdXsnV0Rn6dBjvmnjj0gi760ooLdpY2OhKjm6vZHZxiXPXJlJyPatJVYWyB0WkTUQ6ReSpVfb/NxE5ZbzOiciSiFQY+y6LyFlj3/FUyKNZH619k0zORbgjBf4Bk9sbK1lYinLyqvYT3IyhqTkuDU9zdPv6F5El4ui22LVev6RHZTdjPrLEW1fHuW0Di/gSYV7rzcvpMSpL2hCIiBf4AvAQsBt4TER2xx+jlPozpdRBpdRB4A+Al5RS8Rq6x9h/OFl5NOvH/LEe2ZbaB8HrEV7r1g3RzThxOWYsU9kQtdSW4C/I4XXtp7kp565NMB+JLhvPVBDw57O9qpg30mR6LhUjgiNAp1KqWym1AHwdeOQGxz8GfC0F99WkiBNXxqgvK6S29Pq6xBvFl5/D3vpSPU+9Bt68PEZBroc9dWvP9nozvB7hiPYTrIk3LsUM8eEUGmKIdazeuDRKNA3SfaTCENQDPXGfe41t1yEiRcCDwH/EbVbA90XkhIg8kegmIvKEiBwXkeOhUPp4492OUorjV0a5dcvGF5El4pbNZZzpHU8bh5lTvHl5lIObym5YEnQjHN1ewaXhaYam5lJ63UzjzcujbA8UU+W7eRGg9XBkWwWTcxHaBqdSel0rSMUvb7XVR4lM4PuAV1ZMC92plLqF2NTSx0XkrtVOVEo9rZQ6rJQ6HAgkH+uridE7Nsvg5DyHt6beENy6pZy5xSgX+nVa6kSE5yOc75tI6bSQyS3GCvFTumpZQqJRxfHLoymdFjJJJz9BKgxBLxBfSqkBSJQH91FWTAsppfqMv0PAt4hNNWlswlz0Zc2IIHbNk3phWUJOXR0nqlI/LQGwt76UXK/wlk5Al5CuUJjJuQi3bkm9/hvKCwn489PCEKfCELwJNIvINhHJI9bYH1t5kIiUAu8Gno3bViwifvM98ABwLgUyadbI8Suj+PJz2FWzttqs66GurJCakgJdR/cGnLw6hggc2pz8+oGVFOR62V1bog3xDTCN5MFNqde/iHBoU1laGOKkDYFSKgJ8AngeuAB8Qyl1XkSeFJEn4w79IPB9pVR8dfNq4Kcichp4A/iOUup7ycqkWTsnr4xzcFNZUvmFbsStW8p1qokbcLpnnMaAj5KC1MSvr+TQ5nLO9E7o8pUJONUzjr8gh+0bqL+xFg5uLuPS8DRj0wuWXD9VpMQ7pZR6Tim1QynVqJT678a2LymlvhR3zFeUUo+uOK9bKXXAeO0xz9XYw9ziEm2DUxzYlLpolZUc2lzGtfFZBie1w3IlSilO945b0hs1ObS5jFnje9Zcz1tXY/r3WNQROrTJ8NP0untUoFcWZzHn+yZZiioOpCCtQSLMlNa6UMr1XBufZTi8wAELDYHpp3lLT89dx8xChLaBSUsN8f6GUjzifoe9NgRZzGmjcbayIdpdW4LXI5ztTY+l9nZyuiemk4MWGuKG8kKqfPnaEKzC2d4Josoa/4BJcX4OO6r9rvcTaEOQxZzpHaempIDqkgLL7lGY52VHtZ8zaZJzxU5O946Tl+NhZ43fsnuICPvqS9Im542dnLLQURzPoc1lnO4Zd3WhGm0IspgzvRPsb7DOP2Cyv76Us73ufhCc4NTVcfbUlaR8IdlK9tWX0jE0pRMAruDMtQnqywqpTPFCspXsqy9jYnaRnlH3JgDUhiBLmZhdpHt42tJpIZN9DaWMzSzSO+beB8FulqKKc30TlvpnTPY1lBFV0NqvRwXxXOibZE9d6sOmV7K3PnaPc33u1b82BFmKOVVgy4jAuMdZPT2xzJWRaWYWlmxpiPbVG/rXfpplpucjXBqZTml+p0TsrPGT6xVX//61IchSWvtiaR/22vQg5Hk9nNEN0TKtRtqN3TYYguqSfKp8+dpPE8eF/kmUskf/+TkxP5mb/TTaEGQprf2T1JYWUF6cZ/m98nO87Kr1c/aauyMn7KS1b5Jcr9ActM5RbCIi7G8odXVDZDemIbZjRAaxDtfZaxOu9ZNpQ5CltPZN0lJrz0MAsTDS1r5J1z4IdnO+b5KmoN9yR7HJ3vpSOofCzCxEbLmf2zl/bZLyolxqS62LmItnb0Mp4zOLXHNpxThtCLKQucUlukJhdttoCHbV+BmbWWRoat62e7qZ1v5JW/W/p66EqIKLA3qFMcT0v6euFBFrVhSvZK8x8nDrqEwbgiykcyhMJKpsHRGY92rVKakZmpojNDVvy/y0iWl0LvZrQ7C4FKVtYMpW/bcYCyvPXXPn718bgizETkelyS6jIdK1CeCC0RjbOSKoLyvEl5/DxQGt/86hMAtLUdv8AxDLBLutqti1IzJtCLKQ1r5JivK8bKkosu2epYW51JcVLjeC2YwZsWWnIfB4hJ01fm2IeVv/dhoCiE2Ptg26U/9ZZQhe7Rrhn1697LQYjnOhf5JdNX7LMi4moqW2RDdExEZkDeWFlBZZk3o6ES21fi72T2W9w/583yQFuR62VflsvW9LbQk9o7NMzS3aet+1kFWG4PutA/yP5y6mRTFpq1BK0dpvb8SQSUutn+5QmLnF7E510No3YetowGRXTQlT8xHXR
q7Yxfm+CXbVlFhWgyMRO6tjocLtLkwJnhJDICIPikibiHSKyFOr7L9bRCZE5JTx+sxaz00lu2r8zC4u0TM2Y+VtXM218Vmm5iK2+gdMWmpjkSsdg2Hb7+0WZhYidA9PO6T/WEOUzQ5jsyNk97QQwC5D/26cHk3aEIiIF/gCseLzu4HHRGT3Kof+RCl10Hj90TrPTQk7DIvsVoeNHZjzo86MCLTDuG1gKrai1QH976zR+u8dc64jVF9WiN+lDvtUjAiOAJ1GtbEF4OvAIzacu25MQ9CexYbgQv8UIrHRkd1sqSiiKM+b1SGkTkRsmfjyc9hcUZTVHaHzRuI3O3IMrURE2GX4adxGKgxBPdAT97nX2LaSd4jIaRH5rojsWee5KaHYfBBcOEdnF639E2yrLKYoL8f2e+vIlZijsqQgh/qyQkfuv6vGzwUX9kjtorVvEo9DHSGI+WkuDrjPYZ8KQ7Cax2Xlf3kS2KKUOgD8NfCf6zg3dqDIEyJyXESOh0KhDQu7o9pPWxb3iFr7J2lxoDdq4tYHwS5a+ybZXVdi24rWlbTUlnB5eDpraxO09k+xPeCjINfryP131foJz0dcl5I9FYagF9gU97kB6Is/QCk1qZQKG++fA3JFpGot58Zd42ml1GGl1OFAILBhYXfV+Lk0PM18JPsehMm5WHEMJ+anTXbX+pmYXaR/IvuK2S9FFW0DU474Z0xaav1ElTsjV+ygY2hqOXrHCXYZfhq3dUZTYQjeBJpFZJuI5AGPAsfiDxCRGjG6QCJyxLjvyFrOTTU7avwsRRVdQ9NW3saVXHRgRetKstlh3DM6w+zikmPTEvB2Q+RGh6XVzC4scXV0huZqe9cPxGOWJXWb/pM2BEqpCPAJ4HngAvANpdR5EXlSRJ40Dvtl4JyInAY+DzyqYqx6brIy3QjzIXTrCj8raTN+fGYYmxNkc6oJsxfe7GCPdLPhsHdjCKPVdA6FUQpHRwS+/Bw2VRRywWUjgpR4DI3pnudWbPtS3Pu/Af5mredaybaqYnK9QttA9sWytw+G8efnUGNhsfqbYUauZGND1DEU+801B53rkWazw94NhhgMP5nL9J9VK4sBcr0eGgO+5d5xNtE+OEVztc8xR6XJrhq/64bGdtA+OEVdaQH+AntTS6wkWx327YNT5Hk9bK20L8fWarQYfko3rbDPOkMAsXk6tzlr7KBjKLy8lsJJdlT7uTwyw0Ik6rQottI+GHa8NwrZ67BvH5xie6CYHK+zzd4uF66wz1pD0Dcxx6QLkz9ZxUh4ntHpBZocnJYwaa72sRRVXBrOHof9UlTRFQqzw0FHpYnZGTCnqrKF9kF3dIRMP6Wb1nNkpyHIwhXG7Ubvww0Pglmnt2Moe/R/ZWSahUjUFSMCU4aOLAohDRvJ9nY6GLFlsqWymIJcj6tmJbLTENRkX84hs9F1gyHYHijGI+4aGluNmwxxRXEelcV5dGbRiMA0ek466k28HmF7lc9V+s9KQ2BWa8qmRTXtg1P4C3KoLsl3WhQKcr1srijKqhGBmxoigKagL6umhsxn3Q2GGGLTo9oQOIyIsKPal1UjAnN+1OmIIZPman92jQiGwjSUF1Kcb3+Op9VorvbRPpg9kUPtg2EKcj1ssrEq341oDvq4Nj5LeD7itChAlhoCiKXkbcuiELrOobBreqMQexAuDU+zuJQdkUMdg1Ou6Y1CrGc8NRdhaGreaVFsoX1wiqagz/ZiNIloMvxkXS4ZFWStIdhR7WNidpFQOPMfhGEjYsgNjkqT5mofkajichZEDi0uRekKhR1NbbASM3osW0Zl7YNT7Ai66/cP7oncylpDYD4Ibpqns4q350fd0xC9HTmU+fq/MjLN4pJyV0OURZFbE7OLDE7Os8MFEUMmWyqKyPWKa9qfrDcEbhmaWUmHiyJWTBoDPiRLIofcFDFkUuXLo6woNysMcYcLO0I5Xo8ROeQOQ5y1hqCmpABffo5rLLKVtA9OUVKQQ9DvfMSQSWGel03lRbS75EGwkvbBWFU4NyzmMxERmoM+OrPAELe5LGLIpKnaPZFbWWsIRITGoI/OkDu+CCvpMFIbuCViyGRHdXY0RB2DYTaVF1GY50wxlEQ0Bf20D2V+wETHYJjiPK9jVeES0Rz0cXV0xhU5h7LWEAA0BXwZPzWhlKJ9aMpVw2KTpqCf7uEwkQyPHGofdKf+m4M+xmcWGQ4vOC2KpXQMxSKG3NYRagr6UAq6XNAZzW5DEPQxNDWf0TmHhsMLjM8sLjsH3URz0MfikuLyyIzToljGQiTKpeFpV0Vsmbydcyizp+c6h8LL4Zpuwnwm3TA9nfWGANzxRVhFh0vnR+FtmdziMLOCq6PTRKLKVWs4TMwQxkz+/U/OxSKGGoPFTotyHVurivB6xBWzEikxBCLyoIi0iUiniDy1yv5fE5EzxutnInIgbt9lETkrIqdE5Hgq5FkrzdlgCMxiKC6cmjAfTjc8CFbRaZREbQy4T/9Bfz7+gpyM1r8ZFdjkQv3n53jZUlnkivYn6fXuIuIFvgDcT6wY/Zsickwp1Rp32CXg3UqpMRF5CHgaOBq3/x6l1HCysqyXTRVF5OV4XPFFWEVXKFaVzE0RQyZFeTk0lBfSnuH6h1iiPbdhRg5l8tRQVyhmiN0UsRWPW/SfihHBEaBTKdWtlFoAvg48En+AUupnSqkx4+NrQEMK7ps0sSyAxRlvCLa70FFmsqPan9HpkLtCYWpKnK9KlojmoD+jf/+dQ2FyvcJml+QYWklz0B1FmlJhCOqBnrjPvca2RHwM+G7cZwV8X0ROiMgTiU4SkSdE5LiIHA+FQkkJHE9j0F1ZAFNN19A0jS7sjZo0BX10D0+zFM3MEMau0LQr56dNmqt9DIcXGJ3OzMihzqEwWyudr0qWCLNI0+URZ1OtpEI7q3U1V32qReQeYobg9+M236mUugV4CPi4iNy12rlKqaeVUoeVUocDgUCyMi/TFPDRM+aOWN5UE56PMDA559phMUBjoJiFSJTescyLHFJK0TUUdqV/wOTtnEOZOSrrDqWL/p3tjKbCEPQCm+I+NwB9Kw8Skf3A3wOPKKVGzO1KqT7j7xDwLWJTTbbRXB2L5e0OZV7yM9NRlg4PghtiqVPN0NQ84flImug/837/C5EoV0ZnXN4RMlKtOOwnSIUheBNoFpFtIpIHPAociz9ARDYDzwAfUUq1x20vFhG/+R54ADiXApnWzLJFdoHDJtWYjaubGyJTtkycnluOWHFxQ1RXWkhhrjcj9X95JDbl6Gb9F+TGUq04rf+ko4aUUhER+QTwPOAFvqyUOi8iTxr7vwR8BqgE/tZwWkaUUoeBauBbxrYc4F+VUt9LVqb1sK0qVjYxE5PPdYXC5HiELZXudJQBlBXlUeXLzLKJ6WCIPR5he6A4I0dk6TAihtj0qNMjspSUS1JKPQc8t2Lbl+Le/xbwW6uc1w0cWLndTvJzYmUTMzHnUNfQNJsri8h1qaPMpDHgc/xBsIKu0DTFeV5XlAe9EU1BH8cvj938wDTD7Fy42VkPsd//z7pGiEYVHocK57i7
hbCJpgwNoetyuaPMxIzcyrTkZ51DYRpdHLpr0hiIlU2cWXBH2cRU0RkKU19WSFGeO8qDJqIp6GM+EuXa+KxjMmhDQOyLuDQ8nVHJzyJLUS6PTKeFIWgKxKrFjWRYCGNXKOzKFa0rMefQMy1goisUduVCvpU0mhkOHJyV0IaA2IOwuKToGXPOIqeanrFZFpeUq9cQmGRizqfwfIT+ibnlh9zNZGLkVjSq6BqadrWj2MTsrDnpp9SGAJYby0xqiJYdZenwIGSgIbgUMnMMud8Qb6kswiOZpf++iVlmF5fSwhBUFOdRUZznqCHWhgDYHsi8HlFnGkSsmNSVFlCU580o/adDxJBJLPlZZkUOdYXcm+xvNRoDxXQNOTc1pw0BUFqYS8Cfn1EhpF1DYQL+fEoL3ZnjJh4RoTGQWak+OofCeD3Clkr3jwiAjNQ/uHsNRzyxyDk9InCcxgyLpY5FDKVHIwQx/WeSs7IrFGaLkd02HWgMFmdUwETnUJjSwlwqi/OcFmVNNAV9jEwvMOZQwER6/EptoCkYi2XPhBBGpVQs2VmaDIshpv9r47NMz2dGCGMsYiWN9B/IrIAJsyPk9tBdk0aHp6e1ITBozKAQxpHpBSZmF9PKEJiyZsKoILIU5fLwjOsXMsWTaZFb3aFw2kwLgTYErsENIVypIp0ihkwyKYSxd2yWhaVoehniDDIE4zMLDIcX0kr/9eWF5DtYJEsbAoPGDMrC2JVGoYsmWyqL8XokIxoi05ilU4+0pCCXoD8/IwxxukUMQaxI1rYq53IOaUNgUFtSkDFZGLtCYQpyPdSVFjotyprJy/GwpcL5LIypYDnHTVX6NEQQM1yZoP90NMQQ64zqqSGHyaQsjJ1DYbZX+RxLYLVRnHwQUklXKEyVL5/SIveH7sbTFPTRlQE5n7pCYfK8HhrK06cjBEaRrFFnimRpQxCH07G8qaIrFE4r/4BJY8DH5ZH0D2GMRWylz7ScSWPAx9R8hNDUvNOiJEXX0DRbq4pcW54yEY1BH1GFI2Ur00tTFmNmYZxdSN+ylbMLS1wbn03LhsjM+XR1NH3LViqllrOOphuZEjnk9vKUiTCfWSdWGGtDEEdjsBil4NJw+jqMLw1Po1T6zY9CZjREo0bobjpkHV1JkwuyYCaLWZ4yHQ3B9qpY2UonZiVSYghE5EERaRORThF5apX9IiKfN/afEZFb1nqunWRCCGM65bhZiZkyOJ0bos40DN01Cfrz8eXnpHUI9dXRWHnKdFrDYVKY56W+rNCRjlDShkBEvMAXgIeA3cBjIrJ7xWEPAc3G6wngi+s41za2VhY7ZpFTRVcojEisBGe6UVKQS3VJvqPJt5IlHUN3TUQkViQojX//nUPpFzoaj1N+ylSMCI4AnUqpbqXUAvB14JEVxx6dW0MAACAASURBVDwC/KOK8RpQJiK1azzXNsxC0um8lqArNE1DeSEFuV6nRdkQjYH0boi6QmEKc71pFbobT1OaJ58zG9F0Su8RT2PAR3dommjU3sitVBiCeqAn7nOvsW0tx6zlXABE5AkROS4ix0OhUNJCJ6IxUJzeD8JQejrKTJqCPrrTOISxKxRmW1Vx2oXumjQGixmcnGdqbtFpUTZEVyhMTUkBvnx3l6dMRFPQx+ziEn0T9uZ8SoUhWO0Xv/IpTnTMWs6NbVTqaaXUYaXU4UAgsE4R107MIodtt8ipIBpVdA/H1hCkK03BWAjjUJqGMHalWY6blTQF0nuFfVdoOi39AybLkUM26z8VhqAX2BT3uQHoW+MxaznXVhpdUEh6o1wbn2VuMZrWDZE5mknHUdnc4hK9Y7NpPyKD9NS/UoruNB8RL6e6sVn/qTAEbwLNIrJNRPKAR4FjK445BnzUiB66HZhQSvWv8VxbcToLYDK8HTGUvj2idI7c6g7FQnfTuUe6uaKIXK+kpf5DU/NMzUfS2hBUFudRVpRru58s6Yk0pVRERD4BPA94gS8rpc6LyJPG/i8BzwEPA53ADPBfb3RusjIlQ/zQ7O6dTkqyftKtKtNqmCGM6dgjTdccN/HkeD1srUxPP1k6lWdNhFmtz+4RQUo8Kkqp54g19vHbvhT3XgEfX+u5TlLpy6e8KDcte0RdoWnKinKpSJOqTKthhjCmp/5jobtb06Q8ZSKagj7aBqacFmPdLIfupvGIDGKd0R9etC4gZjX0yuJVcMIip4IuY2l9ulRlSkS6hjB2DoXZVF6UtqG7Jo0BH1dGZ1iIpFfOp66hMEV5XmpKCpwWJSmagj6Gw/NMzNgXuaUNwSqka/K57lA4LVMbrMQMYZxMsxDGdE02t5KmoI+lqOKKA8nPkiFTOkLLARM2tkHaEKxCY7CY4fAC4zPpU7ZyuSpTmg+L4e0QxnQqWxmNqrRNdraSdI0c6s4QQ+xEwIo2BKvQmIax1OmcY2gl6Vg28dr4LPOR9A7dNdm+HDCRPvqfWYgYWXfTX/+bKorI83psnZ7WhmAV0jGE1MzPkwkN0RYjhDGdDMGyIc4A/Rfl5TiW/GyjdC87itNf/2+XrdSGwFEaygtjFjmNDEHnclWmIqdFSRozhDGt9D+UOSMyIO2Sz2XSiBhi09N2zkhoQ7AKOV4PW6uK0ioLZtdQLMeNN01z3Kwk3SK3ukLTlKd56G48jYFiuobsT362UbpC03gEtlSmf0cIYn6yKyPTzEfsKZKlDUECmoKxnEPpQrrnuFlJUzC9Qhi7MsRRbGImP+ufnHNalDXRFQqzqSL9Q3dNzLKVV0bsqdanDUEC0imWem5xiaujMxkRMWHSGCxOqxDG7kwzxGmW8ynds+6uZNlPaZP+tSFIQGMgfWKpr4zMEFWZ4SgzaQr4gfRoiJZDdzOpIXIo+dlGWIoqLg1nRuioyXK1Pm0InCWdIocyzVEG6RXC+HbEUOY0RE4lP9sIfUbobib9/s3ILbt+/9oQJGC7Q3nBN4LZa9ueQT2i4vwc6koL0mJE0JXm5RFXQ0TSJtVHZwaF7sazPWBf5JA2BAkozs+htrQgLYbGnaEw9WWFFOWlZ1WmRMSSz6WBIQ6FycvJjNDdeMwiTW6nK8NCd02ajOSLdkRuaUNwA9Il51BXKJxxvSF4W/9uD2HsHAqzPYNCd01iyc/cn2ol00J3TRoDPmYWlhiwIXJLG4Ib0GgMzdxcPzcaVXQNZZajzKQpGHsQ3B7CmGmhoybpknMoU/Vvp59SG4Ib0BT0EXZ5/dz+yTlmF5cy8kFoSoPIlflI5oXumqRLwESmJPtbiZ2GOClDICIVIvKCiHQYf8tXOWaTiPxIRC6IyHkR+WTcvs+KyDUROWW8Hk5GnlRjdyzvRujKgKpkiUiH+sWZGLprUl9eSH6Ox9X6z6Ssuyup8uVRUpCTFiOCp4AXlVLNwIvG55VEgN9VSrUAtwMfF5Hdcfv/Uil10Hi5plIZxGXBdHGPKBNDR02qfHmUFrq7WlymOiohlvxsu8sjh5arkmWg/per9dmQ6iZZQ/AI8FXj/VeBD6w8QCnVr5Q6abyfAi4A9Une1xbM+rluHhF0DoUpKcihypdZjjIw67e6u35uZwa
G7sbTaGMI40bI5I4QxP4vOzqiyRqCaqVUP8QafCB4o4NFZCtwCHg9bvMnROSMiHx5tamluHOfEJHjInI8FLKnnqfZELn9QWgKpn9VpkSYIXRupStDQ3dNmoI+esZmmFu0J/nZeulazrpb6LQoltAU9BGammdi1tpqfTc1BCLyAxE5t8rrkfXcSER8wH8An1JKTRqbvwg0AgeBfuDPE52vlHpaKXVYKXU4EAis59ZJ4fYQ0lh5xMzsDYH7Qxi7QtMZOxqAmP6Vcm+1uK6habZWFZHjzcy4F/PZtno9x021p5S6Tym1d5XXs8CgiNQCGH+HVruGiOQSMwL/opR6Ju7ag0qpJaVUFPg74Egq/qlU0hj00T8xR3g+4rQo1zExu0hoaj4jHZUmbo5cUUplXNbXlbhZ/xCTa3tVJuvfnpxDyZrRY8DjxvvHgWdXHiCxOYt/AC4opf5ixb7auI8fBM4lKU/KMb+ISy7sEZkPZyYUrE/E2yGk7tN//8QcMwuZGbprsq2qGI+4M3JrbnGJKyPTNFdnrv43G9X6rJ6eTtYQfA64X0Q6gPuNz4hInYiYEUB3Ah8B3rNKmOifishZETkD3AP8TpLypBw394iWI1YyuEfaUF5EXo7HlZFbmVaVbDUKcr1sqihypf67Q9NEFTRX+50WxTLsqtaXlIdLKTUC3LvK9j7gYeP9T4FVPZlKqY8kc3872FIZSx3gRkPQGQqT6xU2ZaijDIwQxip3Rg61D04BsCODe6Tg3mpxHUPZo/9243+1isz0sKSQvBwPWyqKXNkQdQ7G5kcz1VFm0ujSyKGOwTCVxXlU+vKdFsVSmoI+uoenWXJZzqf2wanlQu+ZTFPQx5URa4tkZXYLkiKagr7l3p+baBucyuj5UZPGgI+eUfeFMLYPZYf+mwI+FiJResfsKZu4VjoGw2ytLCI/JzPKUybCrNZ3ddQ6P4E2BGtgZ42fyyMzthWSXgvT8xF6x2bZmcHzoyZNRv3WS8PucRgrpegcDLMjC/Rvpm9w26isYyhMczAL9L+cakUbAkdprvazFFWuiqU2p6oy2VFm0ujCamX9E3NMzUeyQv9uLBtqRgxlun8A7AlY0YZgDZi9bjdND7UZsuysyfyGqDHgQ1wWwrjsKM7giC2T0qJcqnz5rtJ/Vyic8RFDJnYUydKGYA1sqyomxyO0DbjHEHQMTpGf42FzRWZVxVqNglwvDeWFrkr10TEYeyizYWoI3JdzyDRK2aL/pqCPDm0InCUvx8O2qmLaB93TI2objK1ozbSqWIlodFkWzLbBKap8+ZRnWFWsRDQFY/p3S5GmbIkYMmkO+ukYmrKsWp82BGtkR43fVVNDHYNTWdMbgljkSnco7JoQxpj+M39ayKQp6GNidpHhsDtyPrUbEUN5OdnRhO2s8TG3GKXHosit7NBiCtgR9NMzNsPMgvM5hyZmF+mfmMuK0EWTxqCP+UiUvvFZp0UhGlV0DGVHxJCJ21bYZ1tHqHnZT2mN/rUhWCM7a2JZGN0wPdFprDLMhtBREzfVz702PsvMwlJWGWI36X9uMVYeNBscxSbNhv6tmpXQhmCNWG2R10N7ljkq4e3Eem7okb6d2iB79F9bWkBRntcVhsCMGMqmqTl/QS71ZYXaEDjNlorYfKQb/ARtA1MU5XmpL8vcHEMrKS/Oo7I4zxX6XzbEWbCYySRWpMkdqT6yLWLLZEe1z7LIRW0I1kiO10NTwLovYj10DE3RHPThyZKIIZNdtX4uukD/7YNTBP35lBblOi2KrZiRQ07TPjhFjkfYWpkdEUMmO6r9dIemiSylPueQNgTrYEe1jw4X9EjbBsJZNT9qsqumhLaBKccjhzoGw1mxkG8lO6r99E/MMTFjbdnEm9ExFGZrVXHWRAyZ7Kj2s7AU5fJI6iOHskuTSbKjxk/fxByTc849CGPTCwyH57PKUWzSUlvCfCTK5RHnFjZFo4rOLMlxs5KW2tj/fHFg8iZHWku2he6amFNhVpStTMoQiEiFiLwgIh3G31WLz4vIZaMAzSkROb7e892COSfc4aDD2Jwjz6aIFZNdRi/8Qr9zDVHv2Cyzi0tZ2RDtri0BnNX/zEKEK6MzWecfgNjU6KnP3M8De2pSfu1kRwRPAS8qpZqBF43PibhHKXVQKXV4g+c7jjkd4KTDsj2LcgytxFxJfbHfOf23LRvi7NN/wJ9PRXEeFxzU/8WBKZR62yhlE7leD2VF1qxkT9YQPAJ81Xj/VeADNp9vK/VlhRTleR02BGH8+TnUlBQ4JoNTFOR6aQwUOzo10do3icjbo5NsQkRoqfVzwWH9A+yuyz5DYCXJGoJqpVQ/gPE3mOA4BXxfRE6IyBMbON8VeDxCc9DZyKEL/ZPsrPEjkl0RQya7akoc7ZG29k+wrbKY4vykqrymLS2Gw96KyJW1cKF/kpKCnKwKnbaDmxoCEfmBiJxb5fXIOu5zp1LqFuAh4OMictd6BRWRJ0TkuIgcD4VC6z09ZbTUltDaP+lI8q1oVHGhf5I9Wdwb2lXr59r4LBOzzjjsW/snaclq/TvrsG/tn6SltiRrO0JWcVNDoJS6Tym1d5XXs8CgiNQCGH+HElyjz/g7BHwLOGLsWtP5xrlPK6UOK6UOBwKB9fyPKWVPXQnjM4v0TczZfu/LI9NMLyyxp67U9nu7hRZjbtiJUdnE7CI9o7NZOT9tYkYOOTEqW4oq2gamln8DmtSR7NTQMeBx4/3jwLMrDxCRYhHxm++BB4Bzaz3fbew2GuHz1yZsv3drv54fbamJ/e9O+Akuav3TFPSR4xFHIoeujEwzs7CU1fq3imQNweeA+0WkA7jf+IyI1InIc8Yx1cBPReQ08AbwHaXU9250vptpqfUjAuf77H8QzvdNkuORrAwdNakuyaesKNeRhsg0xHuyuEean+OlKehzVP/ZPCKziqQ8XkqpEeDeVbb3AQ8b77uBA+s5380U5eWwvarYMUPQXO0nP8dr+73dgojQ4pDDuLVvkipfHgF/vu33dhO7avy81j1q+30v9OuOkFXolcUbYE9dKa19DkwN9U3q3hAxh3HbgHXVmhKhHZUxWmpLGJicY2za3iI1rX2TNAV9Wd0RsgptCDbAnroS+ibsfRCGJucYDs9ndcSQSUtNCbOLS1wZtaZa02osRKJ0DIb1/DRvO+ztXk9woV87iq1CG4INYEbt2Dk9dN6cn9YNEbvMnDc2zlN3DoVZWIpmdcSWybIhsHF6bnR6gYHJOT0itghtCDaA2Rift3F6yFxRmc0x7CY7qv14BC7YGEKqHZVvE/DnU+XLs9VhbN5LjwisQRuCDVBenEddacFy42AH5/sm2FxRRElBduXAX42CXC/bAz5b/TStfZMU5HrYVpVdOfAT0VJbYqshWO4I1WZfag870IZgg+yuK7V1akg7in+effWlnLVxLUdr/wS7akrwZlkxoES01JbQMRhmIWJPqonW/kmqS/Kp9GV3xJZVaEOwQfbUldAdCjO7sGT5vabmFrk8MqP9A3HsrS9lcHKeoUnrV3grpWKGWOt/mb31pSwsRW1LwHihX3
eErEQbgg2yu66EqLIncsJ0yu2p1w+Cyf6GmNPWjlHBtfFZJuciuiGKY3+9ffqfW1yicyis/QMWog3BBnnbYWy9ITDnwnfX6ogVk921JYjAmV7rG6LzfdpRuZItlUWUFOTYov/W/kkiUcX+hjLL75WtaEOwQerLCiktzLXFYXm+b5LK4jyqS/T8qElxfg5NAR/nbOiRnukdx+sRPTUXh4iwv6GMs9fGLb/XmZ7YPQ5s0h0hq9CGYIOICHvrSzh3zfoRwZneCfbUl2b9itaV2OUwPt0zwa4aPwW5ekVrPPsaSmkbmGJu0Vo/2eneCQL+/KwsxmQX2hAkwf6GMi70T1r6IITnI7QPTXFwkx4Wr2RfQylDU/MMWJgSPBpVnO4d54DW/3UcaChlcUlZnhL8dO84BxrKdEfIQrQhSIKDm8qIRJWlC8vO9k6gFBzarBuilZiN86ke66YnLo1MMzUX0YZ4FfYZc/Zneq3T/+TcIt2haQ406GkhK9GGIAkOGY3DW1etexDMRu6gdpRdx566EvK8Ht66OmbZPU6b+teG4DrqSguo8uXzloWG+JzhjN6v9W8p2hAkQbCkgPqyQksfhFM9Y2ytLKK8OM+ye6Qr+TledteVWGqIT/eMU5znpTGgUx+vRES4ZXOZtR0hY7RhhqtqrEEbgiQ5uLmMUxaPCHRvNDG3bC7nzLVxFi0qpn6qd4J9DaV6RXECbtlSzqXhaUbC85Zc/+SVMbZXFeuOkMUkZQhEpEJEXhCRDuNv+SrH7BSRU3GvSRH5lLHvsyJyLW7fw8nI4wSHNpVxbXzWkhWu/ROzDE7Oa0NwAw5tLmNuMcpFCzJhzi4s0do3waHN1/2sNQa3GLqxYlSglOLElTFu3aL1bzXJjgieAl5USjUDLxqffw6lVJtS6qBS6iBwKzBDrIC9yV+a+5VSz6083+2YjfRJCx6EE1dic9+6IUrMLUYjcdICP8Hp3nEWlxS3bdX6T8T+hlJyPMIJC/TfFZpmbGaRw1r/lpOsIXgE+Krx/qvAB25y/L1Al1LqSpL3dQ1760vJy/Fw/HLqS/e9cWmUojyvXsh0A+pKCwj68y0xBG9ein2nt26uSPm1M4WC3Njv8+SV1Ov/xBVD/1u0/q0mWUNQrZTqBzD+Bm9y/KPA11Zs+4SInBGRL682tWQiIk+IyHEROR4KhZKTOoUU5Ho5uKmMNywyBLduKSfHq105iRARbttawRuXRlEqtaUr37wyxs5qP6VFOvX3jTi0udwYPaXWT3P88hjlRbk0BnTqb6u5aQsjIj8QkXOrvB5Zz41EJA94P/DNuM1fBBqBg0A/8OeJzldKPa2UOqyUOhwIBNZza8s5uq2Cc9cmCM9HUnbNiZlF2ganuG2r7g3djKPbK+ifmKNndDZl11yKKk5eGdPTEmvgyLYK5hajKc87ZPoH9EIy67mpIVBK3aeU2rvK61lgUERqAYy/Qze41EPASaXUYNy1B5VSS0qpKPB3wJHk/h1nOLKtgqgipcPj41dGUQptCNbA0W2VALx2aSRl17w4MEl4PqL1vwaObovp6LXu1Ol/ODxP9/C0nhayiWTnHI4BjxvvHweevcGxj7FiWsg0IgYfBM4lKY8j3LK5HK9HeONS6qaH3rg8Sq5X9IriNdAc9FFRnMfr3SnUv/Fd6hHBzan05bOz2p9SQ/Czrti13tFYmbJrahKTrCH4HHC/iHQA9xufEZE6EVmOABKRImP/MyvO/1MROSsiZ4B7gN9JUh5HKM7PYW9dCa+nsEf6evco+xvKdKKzNeDxCEe2VqRU/690DrOlsoiG8qKUXTOTuX17Bccvj6WsYtmrXcP4C2LPlcZ6kjIESqkRpdS9Sqlm4++osb1PKfVw3HEzSqlKpdTEivM/opTap5Tar5R6v+l4Tkdub6zkVM840ynwE0zMLHKmd5w7m6pSIFl2cHR7Bb1js1wbT95PsLgU5bXuUa3/dfCOxkpmF5dSlpb6lc4Rjm6r1IESNqG1nCLuag6wuKR4tSv5XunPuoaJKrirWTdEa+WOxpiuftqRfETZmd5xwvMR3qkNwZo5YvhpUvH77xmd4eroDHc26Wkhu9CGIEUc3lpOYa6Xn6SgIXq5Yxhffo5OfbwOdlT7qCkp4KX25PX/044RROAOPT+9ZiqK89hTV8LL7cNJX8s0JnpEZh/aEKSI/Bwvt2+v4OWO5B4EpRQ/6QjxjsZKcvWweM2ICHfvDPCTjmEiScazv9I5zL76UsqKdH6b9XDPziAnro4xMbOY1HVe6ggR8OfTHNSJ/uxCtzQp5F3NAS4NT9MzOrPha1wZmaF3bFZPC22Ad+8IMDUXSSob7MTMIievjulpoQ1wz64gS1HFy0mMihciUV5uC/GenUG9fsBGtCFIIe/eGVvo9qO2Gy2nuDE/uBBbZnHXDnctmksH7miqwusRXmrbeEP0o7YhIlHF/burUyhZdnBwUxllRblJ/f7fvDzK1HyEe1tulqRAk0q0IUgh26uK2R4o5nvnBjZ8jefPD7Crxs+WSr2sfr2UFuZy6+byZWO6Eb7fOkDQn88BXQho3Xg9wrt3BHipLUQ0urF0Hy+0DpKf4+GdekRsK9oQpBAR4aG9Nbx+aZTR6YV1nz80NcfxK2M8tLf25gdrVuWhfTVcHJiicyi87nPnFpd4qS3Efbur8ej6Axvi3pZqRqYXOL6BVfZKKV68OMidTVUU5eVYIJ0mEdoQpJiH9tayFFW80Lr+UcELrYMoBQ/urbFAsuzgob21iMBzZ9e/JOXVrhGmF5Z4QE8LbZh7dwUpyPVw7PS1dZ97vm+SntFZ7mvR+rcbbQhSzJ66EjZVFPLdDUwPfffsANuqitlRraMlNkpNaQG3bangO2fWbwiePXUNf0GOTmuQBMX5OdzXUs1zZwfWnY30W29dI9crPLxPd4TsRhuCFCMivHdfHT/tGF5X1bJr47O80jXM+/bX6miJJHnv/lraBqdoH1x71bLJuUW+e26A9x+oIz9Hp/VIhvcfqGN0eoFXOtceSh1ZivLsqT7esyuow3YdQBsCC/jwbZuIRBXfPNG75nO+8WYPAL962yarxMoa3ru/llyv8LU3rq75nG+f7mc+EuVXD2v9J8u7dwYoKcjhmZNrnx76Secww+F5PniowULJNInQhsACtlUVc0djJV974+qaoieWoopvHu/hXc0BneQsBVT58nl4Xy3/fqJ3zbmfvnmihx3VPvY3lFosXeaTn+Pll25t4Lmz/QyucVT8jTd7KCvK5Z5dOmzaCbQhsIjHjmymd2yWl9awuOaHF4fom5jjMT0aSBkffccWpuYi/Oepm/dKT1wZ462r43z4ts16Wi5F/MYdW1lSin9+7eZVabtDYb53foBfO7pZT8s5hDYEFvELe2qoKSng8y923LCEolKKv/pBO5sqCrlPR6ukjFs2l7O7toR/+Omlm6ac+OsfdlBRnMdjR7QhThVbKou5d1c1//L6VWYXlm547NMvd5Pn9fAbd2yzSTrNSrQhsIi8HA+fuq+Zt66O80Jr4gVO3zs3wPm+ST517w6dWyiFiAifv
K+Z7tA0XzP8L6txumecH7eF+K13bdOx6ynmf3/3dkanF/jSS10Jj+kZneGZk9f41cObCPjzbZROE49ueSzkl29tYHtVMX/yvYvMLV7fK5qej/Cnz7fRGCjmA4fqHZAws3lgdzVHt1XwVy+0Mzl3fSK0hUiU//s/z1FelMtHbt/igISZzW1bK/jF/bV88aUuro5cn39LKcUfPHOWXK/w5N2NDkioMUnKEIjIr4jIeRGJisjhGxz3oIi0iUiniDwVt71CRF4QkQ7jb0bVBczxevjM+3bTFZrm098693NTREop/q9vneXKyDR//MhevHola8oREf6fX9zN+Owin/r6qeumiP7qB+2cvTbB//zQfvwFuQ5Jmdl8+r0t5HiE3/3mqeumiP759av8tHOYpx5uob6s0CEJNZD8iOAc8CHg5UQHiIgX+AKx4vW7gcdEZLex+yngRaVUM/Ci8TmjuHtnkE/e28x/nOzls8fOE56PMD0f4dP/eY5nT/XxO/ft4A6d6dIy9taX8keP7OGHF4f4vW+eZiQ8z+JSlM999yJ/++MuPnx4k17JbSG1pYV87pf2c/zKGE/803H6J2ZZiET5+59085lnz/Gu5ip+7chmp8XMepKaFFVKXQBuFmlxBOhUSnUbx34deARoNf7ebRz3VeDHwO8nI5Mb+eS9zUzMLvKVn13ma2/0oFBEoor/7V3b+Pg9TU6Ll/H82tEthKbm+fyLHTx3doCoiun/vxzdzB++b/fNL6BJivcfqGN2IcIfPHOWOz/3Q/JyPMwtRnlwTw1/9ehBndfJBdjhHasH4r11vcBR4321WadYKdUvIglzz4rIE8ATAJs3p1cPwuMRPvv+PbzvQB3fPduP1ys8sLuaW7dUOC1a1vCp+3bwi/vr+NfXr1KQ6+G2rRXcs0unOraLD9+2mTsaq/jG8R7C8xFu317JfS3VekrUJdzUEIjID4DVxs6fVko9u4Z7rPZNrztHrVLqaeBpgMOHD28sx63D3LqlnFu3ZJQbJK1oCvr4jB4BOMamiiJ+94GdTouhWYWbGgKl1H1J3qMXiA/QbgD6jPeDIlJrjAZqgY1XtNBoNBrNhrAjfPRNoFlEtolIHvAocMzYdwx43Hj/OLCWEYZGo9FoUkiy4aMfFJFe4B3Ad0TkeWN7nYg8B6CUigCfAJ4HLgDfUEqdNy7xOeB+EekA7jc+azQajcZG5EbpD9zK4cOH1fHjx50WQ6PRaNIKETmhlLpuzZdeWazRaDRZjjYEGo1Gk+VoQ6DRaDRZjjYEGo1Gk+WkpbNYRELAzSterE4VsPZiqvbhVrnAvbJpudaHW+UC98qWaXJtUUpdVwYuLQ1BMojI8dW85k7jVrnAvbJpudaHW+UC98qWLXLpqSGNRqPJcrQh0Gg0miwnGw3B004LkAC3ygXulU3LtT7cKhe4V7askCvrfAQajUaj+XmycUSg0Wg0mji0IdBoNJosJyMNgYj8ioicF5GoiBxese8PRKRTRNpE5BcSnF8hIi+ISIfxN+XVZETk30TklPG6LCKnEhx3WUTOGsfZkmlPRD4rItfi5Hs4wXEPGnrsFBHL602LyJ+JyEUROSMi3xKRsgTH2aKzm/3/EuPzxv4zInKLVbLE3XOTiPxIRC4Yz8AnVznmbhGZiPt+P2O1XMZ9b/i9OKEv474743Rx24NICAAABE5JREFUSkQmReRTK46xRWci8mURGRKRc3Hb1tQeJfU8KqUy7gW0ADuJ1UA+HLd9N3AayAe2AV2Ad5Xz/xR4ynj/FPAnFsv758BnEuy7DFTZrL/PAr93k2O8hv62A3mGXndbLNcDQI7x/k8SfS926Gwt/z/wMPBdYlX6bgdet+G7qwVuMd77gfZV5Lob+Ladv6m1fC9O6CvB9zpAbOGV7ToD7gJuAc7Fbbtpe5Ts85iRIwKl1AWlVNsqux4Bvq6UmldKXQI6gSMJjvuq8f6rwAeskTTWCwJ+FfiaVfewiCNAp1KqWym1AHydmN4sQyn1fRWrbwHwGrFqd06xlv//EeAfVYzXgDKjEp9lKKX6lVInjfdTxGqA1Ft5zxRiu75W4V6gSym10cwFSaGUehkYXbF5Le1RUs9jRhqCG1AP9MR97mX1h6RaKdUPsQcLsLLK+buAQaVUR4L9Cvi+iJwQkScslGMlnzCG519OMBRdqy6t4jeJ9R5Xww6dreX/d1RHIrIVOAS8vsrud4jIaRH5rojssUmkm30vTv+mIFZBMVGnzAmdwdrao6R0d9OaxW5FRH4A1Kyy69NKqUQlL2WVbZbFz65Rxse48WjgTqVUn4gEgRdE5KLRa7BMNuCLwB8T080fE5u6+s2Vl1jl3KR1uRadicingQjwLwkuY4nOVoq6yraV/7+tv7efu7GID/gP4FNKqckVu08Sm/oIG/6f/wSabRDrZt+LY/oCkFgp3fcDf7DKbqd0tlaS0l3aGgKl1H0bOK0X2BT3uQHoW+W4QRGpVUr1G0PTIStkFJEc4EPArTe4Rp/xd0hEvkVsCJh0o7ZW/YnI3wHfXmXXWnWZUrlE5HHgF4F7lTE5uso1LNHZCtby/1uio5shIrnEjMC/KKWeWbk/3jAopZ4Tkb8VkSqllKXJ1dbwvTiirzgeAk4qpQZX7nBKZwZraY+S0l22TQ0dAx4VkXwR2UbMor+R4LjHjfePA4lGGMlyH3BRKdW72k4RKRYRv/memLP03GrHppIV87IfTHDPN4FmEdlm9KQeJaY3K+V6EPh94P1KqZkEx9ils7X8/8eAjxrRMLcDE+YQ3yoMn9M/ABeUUn+R4Jga4zhE5AixdmDEYrnW8r3Yrq8VJBydO6GzONbSHiX3PFrtBXfiRazx6gXmgUHg+bh9nybmXW8DHorb/vcYEUZAJfAi0GH8rbBIzq8AT67YVgc8Z7zfTsz7fxo4T2x6xA79/RNwFjhj/JhqV8pmfH6YWFRKlx2yEXPu9wCnjNeXnNTZav8/8KT5nRIbrn/B2H+WuAg2C2V6J7EpgTNxenp4hVyfMHRzmpjT/Q4b5Fr1e3FaX3HyFRFr2EvjttmuM2KGqB9YNNqwjyVqj1L5POoUExqNRpPlZNvUkEaj0WhWoA2BRqPRZDnaEGg0Gk2Wow2BRqPRZDnaEGg0Gk2Wow2BRqPRZDnaEGg0Gk2W8/8DSqD8M+WcWt0AAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "y = x.sin()\n", "pl.plot(x.numpy(), y.numpy())" @@ -1354,32 +762,9 @@ }, { "cell_type": "code", - "execution_count": 75, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 75, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deZBc9Xnu8e8zm/Z9RzsgC4RZLMbCQGIbGwgQJzKpJBfi65AYX4WUqcS5yS2T8i1fV7luynEqScUJgcgJZZzYJtexiRVbNmDZjuNgMCMihIQkNCMEGjTSjPYZbaOZee8ffYSbpmdTL6d7+vlUdfU55/c73e+cXp45S5+jiMDMzGpXXdoFmJlZuhwEZmY1zkFgZlbjHARmZjXOQWBmVuMa0i7gQsyePTuWLVuWdhlmZlVl8+bNhyJiTu70qgyCZcuW0dLSknYZZmZVRdKr+aZ705CZWY1zEJiZ1TgHgZlZjXMQmJnVOAeBmVmNK0oQSHpEUqekbYO0S9LnJbVK2ippdVbbbZJ2JW0PFKMeMzMbuWKtEXwRuG2I9tuBFcltHfAQgKR64MGkfRVwt6RVRarJzMxGoCi/I4iIH0laNkSXtcCXInPO62ckTZe0AFgGtEbEHgBJjyV9XypGXWY2cr19A5w820fP2T66z/RxsreP3r4BzvUP0Ncf9A0M0Nsf9CXj5wYy9/0DQQDnT2kfAUEk928eJ+mXr60iT4hfgafpv3P1IpbPnlTUxyzXD8oWAvuyxtuTafmmX5fvASStI7M2wZIlS0pTpdkYdeZcP62dPew60M2+o6c4eOIMB46f4cCJs3R1n+HEmcyXvr2VlHYFb7Z66YyqDYJ8izKGmP7WiRHrgfUAzc3NlRfTZhXk2KleftJ2mKfbDvPMnsO0dfUwkPWpmT25iXlTx3PRtPFcs3gaUyc0MmVcA5PGNTB5XANTxmeGm+rraKivo7FeNNTV0dSQuW+oF431dTTUifo6IQTKfGkKkJTcg9AbX6bZ42/pV2nfuDWkXEHQDizOGl8E7AeaBpluZqN0rn+AH+zs5Gub2/nBzk76BoKJTfW8c9lM7rhyAZfNn8KKeVNYMnMiTQ0+YNB+plxBsAG4P9kHcB1wPCI6JHUBKyQtB14H7gJ+o0w1mY0JZ8718+VnX+Pv/r2Nzu6zzJ48jnt/bjm3XjGfqxZNo7HeX/o2tKIEgaSvAu8FZktqB/4P0AgQEQ8DG4E7gFbgFPDbSVufpPuBJ4B64JGI2F6MmsxqwQ92dvKpDdvYd+Q01188iz+580res3KOv/xtVIp11NDdw7QH8LFB2jaSCQozG6Ez5/r5zLde4svPvsalcyfz5Y9ex42Xzk67LKtSVXkaarNadqjnLPd+8TleaD/O77z7Yv7w1pXe5m8FcRCYVZH9x05z9xee4eCJM6z/8LXcesX8tEuyMcBBYFYlDvWc5b//w7Mc6enlK//jXaxeMiPtkmyMcBCYVYGzff189NEW9h87zT/ee51DwIrKQWBWBT7zrZfYsu8YD31oNe9cNjPtcmyM8R4mswr33W0d/NMzr/E777mY269ckHY5NgY5CMwq2NGTvfzvf93O2xdO5Y9uXZl2OTZGedOQWQX7vxt3cOxUL1/6yBr/SMxKxu8sswq17fXj/Mvmdu79+eWsumhq2uXYGOYgMKtAEcGfbNzBzElNfOymS9Mux8Y4B4FZBfpx6yGebjvM773vUqaOb0y7HBvjHARmFehvf9DG/Knj+Y3rlqZditUAB4FZhdmy7xg/2XOYe39uuc8hZGXhd5lZhfm7f29j6vgG7r7Ol2S18nAQmFWQgyfO8ORLB7n7uiVMHueju608HARmFeRrLfvoHwjufqfXBqx8ihIEkm6TtEtSq6QH8rT/L0lbkts2Sf2SZiZteyW9mLS1FKMes2o0MBB89af7uPHSWSybPSntcqyGFBwEkuqBB4HbgVXA3ZJWZfeJiD+LiGsi4hrgj4F/j4gjWV1uStqbC63HrFr9uPUQrx87zd1rvDZg5VWMNYI1QGtE7ImIXuAxYO0Q/e8GvlqE5zUbUza8sJ8p4xu4ZdW8tEuxGlOMIFgI7Msab0+mvYWkicBtwNezJgfwpKTNktYN9iSS1klqkdTS1dVVhLLNKsfZvn6e2H6AX7hiPuMa6tMux2pMMYJAeabFIH1/CfjPnM1CN0bEajKblj4m6d35ZoyI9RHRHBHNc+bMKaxiswrzo5cP0X2mjw9c5dNMW/kVIwjagcVZ44uA/YP0vYuczUIRsT+57wQeJ7Opyaym/NsL+5kxsZEbL52ddilWg4oRBM8BKyQtl9RE5st+Q24nSdOA9wDfzJo2SdKU88PArcC2ItRkVjV6+wbYtOMgv3DFfJ9q2lJR8C9WIqJP0v3AE0A98EhEbJd0X9L+cNL1TuDJiDiZNfs84HFJ52v5SkR8t9CazKrJT185wsnefm6+3DuJLR1F+eliRGwENuZMezhn/IvAF3Om7QGuLkYNZtVq086DjGuo82YhS43XQ81SFBF8f2cnN1wyiwlNPlrI0uEgMEvRnkMnefXwKd532dy0S7Ea5iAwS9EPd2V+E3OTg8BS5CAwS9HTrYdYPnsSi2ZMTLsUq2EOArOU9PUP8OwrR7j+kllpl2I1zkFglpKtrx+n52wfN17io4UsXQ4Cs5T8pO0wAO+6eGbKlVitcxCYpeTptkNcNn8KsyaPS7sUq3EOArMU9PYN0LL3KDd4s5BVAAeBWQq27z/O2b4B3rlsRtqlmDkIzNKw+dWjAFy71EFg6XMQmKWgZe9RFs+cwNyp49MuxcxBYFZuEUHLq0dpXuqjhawyOAjMyuy1I6c41HPWm4WsYjgIzMqsZW9m/0CzdxRbhXAQmJXZln3HmDyugRVzp6RdihlQpCCQdJukXZJaJT2Qp/29ko5L2pLcPjXSec3Gmq3tx3j7wqnU1yntUsyAIlyhTFI98CBwC5kL2T8naUNEvJTT9T8i4gMXOK/ZmNDbN8COjm5++8ZlaZdi9oZirBGsAVojYk9E9AKPAWvLMK9
Z1dl1oJve/gGuXDQt7VLM3lCMIFgI7Msab0+m5bpe0guSviPpilHOi6R1kloktXR1dRWhbLPye6H9GABXL5qeciVmP1OMIMi3oTNyxp8HlkbE1cBfA/86inkzEyPWR0RzRDTPmTPngos1S9PW9mPMmNjIohkT0i7F7A3FCIJ2YHHW+CJgf3aHiDgRET3J8EagUdLskcxrNpZsbT/OVYumI3lHsVWOYgTBc8AKScslNQF3ARuyO0iar+SdL2lN8ryHRzKv2Vhx5lw/uzt7uMr7B6zCFHzUUET0SbofeAKoBx6JiO2S7kvaHwZ+FfhdSX3AaeCuiAgg77yF1mRWiXYf7KF/IFi1YGrapZi9ScFBAG9s7tmYM+3hrOG/Af5mpPOajUU7Ok4AcJmDwCqMf1lsViY7DpxgQmM9S2ZOTLsUszdxEJiVyc6OblbOn+JfFFvFcRCYlUFEsOPACS5f4PMLWeVxEJiVwcETZzl26hyXe/+AVSAHgVkZ7DiQ7Cie7yCwyuMgMCuD80cMrZzvTUNWeRwEZmWws6ObhdMnMG1CY9qlmL2Fg8CsDHZ0eEexVS4HgVmJnTnXz55DJ72j2CqWg8CsxFo7M6eW8I5iq1QOArMSO7+j2JuGrFI5CMxKbOeBbsY31rF01qS0SzHLy0FgVmI7Ok6wcp5PLWGVy0FgVkIRkRwx5P0DVrkcBGYldKinl6OnzvmHZFbRihIEkm6TtEtSq6QH8rR/SNLW5Pa0pKuz2vZKelHSFkktxajHrFK0dvYAcOncySlXYja4gi9MI6keeBC4hcw1iJ+TtCEiXsrq9grwnog4Kul2YD1wXVb7TRFxqNBazCpNW5eDwCpfMdYI1gCtEbEnInqBx4C12R0i4umIOJqMPkPmIvVmY15rZw+TmuqZP3V82qWYDaoYQbAQ2Jc13p5MG8y9wHeyxgN4UtJmSesGm0nSOkktklq6uroKKtisXNq6erhk7mQkHzFklasYQZDvHR55O0o3kQmCT2RNvjEiVgO3Ax+T9O5880bE+ohojojmOXPmFFqzWVm0dfZwyRxvFrLKVowgaAcWZ40vAvbndpJ0FfD3wNqIOHx+ekTsT+47gcfJbGoyq3onz/ax//gZ7x+wileMIHgOWCFpuaQm4C5gQ3YHSUuAbwAfjoiXs6ZPkjTl/DBwK7CtCDWZpW5P10kALpnjXxRbZSv4qKGI6JN0P/AEUA88EhHbJd2XtD8MfAqYBfxtsq20LyKagXnA48m0BuArEfHdQmsyqwStXd2AjxiyyldwEABExEZgY860h7OGPwp8NM98e4Crc6ebjQVtnSeprxNLZnqNwCqbf1lsViKtnT0snTWRpgZ/zKyy+R1qViJtXT5iyKqDg8CsBPr6B9h7+KT3D1hVcBCYlcBrR05xrj+8RmBVwUFgVgI+2ZxVEweBWQm0+TcEVkUcBGYl0NrZw7yp45gyvjHtUsyG5SAwK4HWrh5vFrKq4SAwK7KIYI9PNmdVxEFgVmSd3WfpPtvnNQKrGg4CsyJrS44Y8hqBVQsHgVmRtfrylFZlHARmRdbW2cPkcQ3MnTIu7VLMRsRBYFZkrb48pVUZB4FZkbV1nvQPyayqOAjMiqj7zDkOnPDlKa26FCUIJN0maZekVkkP5GmXpM8n7VslrR7pvGbV5GeXp3QQWPUoOAgk1QMPArcDq4C7Ja3K6XY7sCK5rQMeGsW8ZlXDJ5uzalSMNYI1QGtE7ImIXuAxYG1On7XAlyLjGWC6pAUjnNesarR19dBQJ5bMnJh2KWYjVowgWAjsyxpvT6aNpM9I5gVA0jpJLZJaurq6Ci7arBRaO3tYNnsSjfXe/WbVoxjv1nzHyMUI+4xk3szEiPUR0RwRzXPmzBlliWbl0drV4yOGrOoUIwjagcVZ44uA/SPsM5J5zarCuf4BXjt8yvsHrOoUIwieA1ZIWi6pCbgL2JDTZwPwm8nRQ+8CjkdExwjnNasKrx4+Sd+AL09p1aeh0AeIiD5J9wNPAPXAIxGxXdJ9SfvDwEbgDqAVOAX89lDzFlqTWRpaOzOHjnqNwKpNwUEAEBEbyXzZZ097OGs4gI+NdF6zatSWnGzuYq8RWJXxoQ1mRdLW2cOCaeOZPK4o/1+ZlY2DwKxIMkcMeW3Aqo+DwKwIIoK2Tl+n2KqTg8CsCA6cOMPJ3n7/hsCqkoPArAjakiOGLvEagVUhB4FZEbR2dgM+dNSqk4PArAjauk4yZXwDcyb78pRWfRwEZkXQmuwo9uUprRo5CMyKwIeOWjVzEJgV6Pjpc3R1n/X+AataDgKzAp0/tYTXCKxaOQjMCtTmy1NalXMQmBWotauHpvo6Fs+YkHYpZhfEQWBWoLbOHpbOmkiDL09pVcrvXLMCvXywh7fNm5J2GWYXzEFgVoBTvX3sO3rKQWBVraAgkDRT0lOSdif3M/L0WSzpB5J2SNou6fez2j4t6XVJW5LbHYXUY1ZurZ09RMDb5nlHsVWvQtcIHgA2RcQKYFMynqsP+MOIuBx4F/AxSauy2v8yIq5Jbr5SmVWVlw9mjhh623yvEVj1KjQI1gKPJsOPAh/M7RARHRHxfDLcDewAFhb4vGYVYffBbprq61g6c2LapZhdsEKDYF5EdEDmCx+YO1RnScuAdwDPZk2+X9JWSY/k27SUNe86SS2SWrq6ugos26w4dh3s5pK5k33EkFW1Yd+9kr4naVue29rRPJGkycDXgY9HxIlk8kPAJcA1QAfw54PNHxHrI6I5IprnzJkzmqc2K5ndB3u8f8Cq3rBX2Y6Imwdrk3RQ0oKI6JC0AOgcpF8jmRD4ckR8I+uxD2b1+QLwrdEUb5am7jPneP3YaX5j3pK0SzErSKHrsxuAe5Lhe4Bv5nZQ5ry8/wDsiIi/yGlbkDV6J7CtwHrMymZ3cmoJHzpq1a7QIPgscIuk3cAtyTiSLpJ0/gigG4EPA+/Lc5jo5yS9KGkrcBPwBwXWY1Y2uw9mrkrmTUNW7YbdNDSUiDgMvD/P9P3AHcnwj4G8V+uIiA8X8vxmadp1oIfxjXUsnuEjhqy6+VAHswu0u7ObFXOnUFfnq5JZdXMQmF2gXQe6vX/AxgQHgdkFOHaql87us94/YGOCg8DsArxxagmvEdgY4CAwuwAvnz9iyOcYsjHAQWB2AXYf7GbyuAYumjY+7VLMCuYgMLsAuw52c+ncyWR+L2lW3RwEZqMUEby0/wSrLpqadilmReEgMBul14+d5sSZPlYtcBDY2OAgMBull/ZnTp7rNQIbKxwEZqP0UscJJLjMRwzZGOEgMBul7ftPcPHsSUxsKuhUXWYVw0FgNkqZHcXT0i7DrGgcBGajcPxU5mI03lFsY4mDwGwUtnccB+DyBd4/YGNHQUEgaaakpyTtTu7zXnxe0t7kAjRbJLWMdn6zSvFieyYIrlo0PeVKzIqn0DWCB4BNEbEC2JSMD+amiLgmIpovcH6z1G1tP86iGROYOakp7VLMiqbQIFgLPJoMPwp8sMzzm5XV1tePcdUi7yi2saXQIJgXER0Ayf3cQfoF8KSkzZLWXcD8SFonqUVSS1
dXV4Flm43ekZO97Dty2puFbMwZ9kBoSd8D5udp+uQonufGiNgvaS7wlKSdEfGjUcxPRKwH1gM0NzfHaOY1K4at7ccAvEZgY86wQRARNw/WJumgpAUR0SFpAdA5yGPsT+47JT0OrAF+BIxofrNKsLX9OBJcudBBYGNLoZuGNgD3JMP3AN/M7SBpkqQp54eBW4FtI53frFJsbT/OxbMnMWV8Y9qlmBVVoUHwWeAWSbuBW5JxJF0kaWPSZx7wY0kvAD8Fvh0R3x1qfrNKExFs2XeUqxd7/4CNPQWdLCUiDgPvzzN9P3BHMrwHuHo085tVmlcPn+JQTy/NS2emXYpZ0fmXxWYj0PLqUQCuXerfPNrY4yAwG4HNrx5h6vgGVsydnHYpZkXnIDAbgZa9R1m9dAZ1db5GsY09DgKzYRw71cvuzh6avVnIxigHgdkwnn/t/P4B7yi2sclBYDaMn7Qdpqm+jncs8aGjNjY5CMyG8XTbYVYvnc74xvq0SzErCQeB2RCOnuzlpY4T3HjJ7LRLMSsZB4HZEJ7Zc5gIuOHSWWmXYlYyDgKzIfxn2yEmNdX71NM2pjkIzIbwdOth1iyfSWO9Pyo2dvndbTaIvYdOsufQSd7ztjlpl2JWUg4Cs0F8f2fm8hjvu2xeypWYlZaDwGwQ39/ZyYq5k1kya2LapZiVlIPALI+es308+8ph3nf5oJfRNhszHARmefzHy12c6w/e781CVgMKCgJJMyU9JWl3cv+Ws3JJWilpS9bthKSPJ22flvR6VtsdhdRjVizffrGDWZOaWO3TSlgNKHSN4AFgU0SsADYl428SEbsi4pqIuAa4FjgFPJ7V5S/Pt0fExtz5zcrtVG8fm3Z0cvuV82nwYaNWAwp9l68FHk2GHwU+OEz/9wNtEfFqgc9rVjKbdnRy+lw/H7jqorRLMSuLQoNgXkR0ACT3w+1Zuwv4as60+yVtlfRIvk1L50laJ6lFUktXV1dhVZsN4d9e2M+8qeN45zKfdtpqw7BBIOl7krblua0dzRNJagJ+Gfha1uSHgEuAa4AO4M8Hmz8i1kdEc0Q0z5njH/hYaRw52csPd3Xxi1deRL2vRmY1omG4DhFx82Btkg5KWhARHZIWAJ1DPNTtwPMRcTDrsd8YlvQF4FsjK9usNL7xfDu9/QP8t3cuTrsUs7IpdNPQBuCeZPge4JtD9L2bnM1CSXicdyewrcB6zC5YRPCVn77G6iXTWTl/StrlmJVNoUHwWeAWSbuBW5JxJF0k6Y0jgCRNTNq/kTP/5yS9KGkrcBPwBwXWY3bBfvrKEfZ0neTuNUvSLsWsrIbdNDSUiDhM5kig3On7gTuyxk8Bbzmhe0R8uJDnNyumR/7zFaZNaOQXr1owfGezMcQHSZsBrZ09PPnSQX7z+qVMbCro/yOzquMgMAPW/6iNcQ11/NYNy9IuxazsHARW8149fJLH/+t1fr15MbMmj0u7HLOycxBYzfvcE7toqKvj/psuTbsUs1Q4CKymPf/aUb69tYN1776YuVPHp12OWSocBFazzvUP8MnHtzF3yjjWvfvitMsxS40Pj7Ca9fAP29jRcYL1H76WSeP8UbDa5TUCq0lb9h3j89/fzS9dfRG3XjE/7XLMUuUgsJpzqOcsv/tPm5k3dTyfWXtF2uWYpc7rw1ZTTp7tY92XWjhyspev/+4NTJ/YlHZJZqnzGoHVjNO9/fzOP25my75j/NVd7+DtC6elXZJZRfAagdWEIyd7uffR59iy7xh/9qtXc9vbvV/A7DwHgY15LXuP8Htf/S8On+zloQ9d6xAwy+EgsDGr+8w5/uKpl3n06b0snjmRr913PVctmp52WWYVx0FgY87hnrM89tw+vvAfezh++hwfum4Jn7jtMqaMb0y7NLOK5CCwMeF0bz8/bj3Ev2zex6YdnfQNBDetnMP/vGUlVy7yTmGzoRQUBJJ+Dfg0cDmwJiJaBul3G/BXQD3w9xFx/kpmM4F/BpYBe4Ffj4ijhdRkY9/AQPD6sdPsOtDNi68f5yd7DvNfrx3lXH8we3ITv3XDMn6tebEvN2k2QoWuEWwDfgX4u8E6SKoHHiRzqcp24DlJGyLiJeABYFNEfFbSA8n4JwqsySpcRHCuPzjXP0Bv30DmPhk+fa6f46fPceJ0HydOn+N4cjtyqpf9x07TcewM+46e4lRvPwASvP2iaXzkxuXccOlsbrhkFo31PirabDQKvVTlDgBJQ3VbA7RGxJ6k72PAWuCl5P69Sb9HgR9SwiD4/KbdbHhh/xvjEfGz4dzOMfho9nz55s1ujpzWyH3ctzzxyJ7nLY8zxPMMVV9uj7c+7uA1Dfe4uX0HBjIB0Ns/kFvAkOoEMyY2sWD6eJbOmsgNl85i5bwpvG3+FN42bwqTfZ4gs4KU4xO0ENiXNd4OXJcMz4uIDoCI6JA0d7AHkbQOWAewZMmFXVx87pRxrJyXs7lAeQfPP+dgXcnNvqHmfUtMvmXerL7DPm7++fLO+6bxYfoO2Tb4vMMts2z1daKpoY7G+jrGNdTRWC+a6utozJo2rqGeaRMamTahkakTGpg2oZHJ4xqG+2fDzAowbBBI+h6Q78DrT0bEN0fwHPk+wUP8H5xfRKwH1gM0NzePen6Au9Ys4a41FxYiZmZj1bBBEBE3F/gc7cDirPFFwPntMwclLUjWBhYAnQU+l5mZjVI59qo9B6yQtFxSE3AXsCFp2wDckwzfA4xkDcPMzIqooCCQdKekduB64NuSnkimXyRpI0BE9AH3A08AO4D/FxHbk4f4LHCLpN1kjir6bCH1mJnZ6Cn3yJRq0NzcHC0teX+yYGZmg5C0OSKac6f7gGszsxrnIDAzq3EOAjOzGucgMDOrcVW5s1hSF/DqBc4+GzhUxHKKpVLrgsqtzXWNTqXWBZVb21ira2lEzMmdWJVBUAhJLfn2mqetUuuCyq3NdY1OpdYFlVtbrdTlTUNmZjXOQWBmVuNqMQjWp13AICq1Lqjc2lzX6FRqXVC5tdVEXTW3j8DMzN6sFtcIzMwsi4PAzKzGjckgkPRrkrZLGpDUnNP2x5JaJe2S9AuDzD9T0lOSdif3M0pQ4z9L2pLc9kraMki/vZJeTPqV5Ux7kj4t6fWs+u4YpN9tyXJsTa45Xeq6/kzSTklbJT0uafog/cqyzIb7+5Xx+aR9q6TVpaol6zkXS/qBpB3JZ+D38/R5r6TjWa/vp0pdV/K8Q74uaSyv5HlXZi2LLZJOSPp4Tp+yLDNJj0jqlLQta9qIvo8K+jxGxJi7AZcDK8lcA7k5a/oq4AVgHLAcaAPq88z/OeCBZPgB4E9LXO+fA58apG0vMLvMy+/TwB8N06c+WX4XA03Jcl1V4rpuBRqS4T8d7HUpxzIbyd8P3AF8h8xV+t4FPFuG124BsDoZngK8nKeu9wLfKud7aiSvSxrLa5DX9QCZH16VfZkB7wZWA9uypg37fVTo53FMrhFExI6I2JWnaS3wWEScjYhXgFZgzSD9Hk2GHwU+WJpKM/8FAb8OfLVUz1Eia4DWiNgTEb3AY2SWW8lExJORub4FwDNkr
naXlpH8/WuBL0XGM8D05Ep8JRMRHRHxfDLcTeYaIAtL+ZxFVPbllcf7gbaIuNAzFxQkIn4EHMmZPJLvo4I+j2MyCIawENiXNd5O/g/JvIjogMwHC5hbwpp+HjgYEbsHaQ/gSUmbJa0rYR257k9Wzx8ZZFV0pMuyVD5C5r/HfMqxzEby96e6jCQtA94BPJun+XpJL0j6jqQrylTScK9L2u8pyFxBcbB/ytJYZjCy76OClt2w1yyuVJK+B8zP0/TJiBjskpfKM61kx8+OsMa7GXpt4MaI2C9pLvCUpJ3Jfw0lqw14CPgMmWXzGTKbrj6S+xB55i14WY5kmUn6JNAHfHmQhynJMsstNc+03L+/rO+3Nz2xNBn4OvDxiDiR0/w8mU0fPcn+n38FVpShrOFel9SWF4Ayl9L9ZeCP8zSntcxGqqBlV7VBEBE3X8Bs7cDirPFFwP48/Q5KWhARHcmqaWcpapTUAPwKcO0Qj7E/ue+U9DiZVcCCv9RGuvwkfQH4Vp6mkS7LotYl6R7gA8D7I9k4mucxSrLMcozk7y/JMhqOpEYyIfDliPhGbnt2METERkl/K2l2RJT05GojeF1SWV5Zbgeej4iDuQ1pLbPESL6PClp2tbZpaANwl6RxkpaTSfSfDtLvnmT4HmCwNYxC3QzsjIj2fI2SJkmacn6YzM7Sbfn6FlPOdtk7B3nO54AVkpYn/0ndRWa5lbKu24BPAL8cEacG6VOuZTaSv38D8JvJ0TDvAo6fX8UvlWSf0z8AOyLiLwbpMz/ph6Q1ZL4HDpe4rpG8LmVfXjkGXTtPY5llGcn3UWGfx1LvBU/jRubLqx04CxwEnshq+ySZveu7gNuzpv89yRFGwCxgE7A7uZ9Zojq/CNyXM+0iYGMyfDGZvf8vANvJbB4px/L7R+BFYGvyZlqQW1syfgeZo1LaylEbmZ37+4AtybVcSvcAAACJSURBVO3hNJdZvr8fuO/8a0pmdf3BpP1Fso5gK2FNP0dmk8DWrOV0R05d9yfL5gUyO91vKENdeV+XtJdXVn0TyXyxT8uaVvZlRiaIOoBzyXfYvYN9HxXz8+hTTJiZ1bha2zRkZmY5HARmZjXOQWBmVuMcBGZmNc5BYGZW4xwEZmY1zkFgZlbj/j8RFDZkH0CdqQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "y = x.tanh()\n", "pl.plot(x.numpy(), y.numpy())" @@ -1394,32 +779,9 @@ }, { "cell_type": "code", - "execution_count": 76, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 76, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAD4CAYAAADsKpHdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfXBd9X3n8ffHli38/CgbRzbYgHl0gsEexzQlJaEphGkxSUNrOhO8LbMOLOw203a30Mw2mXbYKc0mTGkLXVIYHprwsBCKJ4UmBPLULQEEGGywjQU2tmxZkp8l25Is6bt/3J/oRVzJsqR775H0ec3c0bnfc373fO+RdL/3nN/vnKOIwMzMbEy5EzAzs2xwQTAzM8AFwczMEhcEMzMDXBDMzCypKHcCAzV79uxYuHBhudMwMxtWXn311b0RUVVo3rAtCAsXLqSmpqbcaZiZDSuS3u9tng8ZmZkZ4IJgZmaJC4KZmQEuCGZmlpywIEhaIOknkjZJekvSH6b4TEnPSdqafs7Ia3ObpFpJWyRdkRdfJmlDmneXJKV4paTHUvwlSQuH/q2amVlf+rOH0AH8cUScB6wEbpZ0PnAr8HxELAaeT89J81YDFwBXAndLGpte6x5gLbA4Pa5M8RuAAxFxFnAncMcQvDczMzsJJywIEVEfEa+l6WZgE1ANrAIeTIs9CFyTplcBj0ZEW0RsA2qBFZLmAVMj4sXIXWL1oR5tul/rCeDy7r0HMzMrjZPqQ0iHci4CXgLmRkQ95IoGMCctVg3szGtWl2LVabpn/ENtIqIDOATMOpnczMxGutbjnfz1v27mjZ0Hi/L6/S4IkiYDTwJfjYjDfS1aIBZ9xPtq0zOHtZJqJNU0NTWdKGUzsxFlb0sbd//0XTbv6esjeOD6VRAkjSNXDL4bEd9P4YZ0GIj0szHF64AFec3nA7tTfH6B+IfaSKoApgH7e+YREfdGxPKIWF5VVfDMazOzEWv/kXYAZkwcX5TX788oIwH3AZsi4tt5s9YBa9L0GuDpvPjqNHJoEbnO45fTYaVmSSvTa17fo033a30JeCF8Kzczsw/ZlwrCrMnFKQj9uZbRp4AvAxskrU+xPwP+Cnhc0g3ADuBagIh4S9LjwNvkRijdHBGdqd1NwAPABODZ9IBcwXlYUi25PYPVg3xfZmYjzv6WXEGYOamyKK9/woIQEf9G4WP8AJf30uZ24PYC8RpgSYF4K6mgmJlZYQeOdheEMh0yMjOzbNh3pJ1xY8XUU4pzoWoXBDOzYWJ/SzszJo6nWKdpuSCYmQ0T+460F+1wEbggmJkNG/uPtLkgmJkZHDh63AXBzMxgX0sbs1wQzMxGt+OdXRxu7SjaOQjggmBmNiwcONJ9DsK4oq3DBcHMbBjYd6S4ZymDC4KZ2bDwH3sI7kMwMxvVin1hO3BBMDMbFvZ7D8HMzOA/9hCmT3CnspnZqLb/SBvTJ46jYmzxPrZdEMzMhoF9Le1FPSkNXBDMzIaFvS1tzJ5cvCGn0L9baN4vqVHSxrzYY5LWp8f27jupSVoo6VjevH/Ia7NM0gZJtZLuSrfRJN1q87EUf0nSwqF/m2Zmw9velnZmTylzQSB3y8sr8wMR8bsRsTQilgJPAt/Pm/1u97yIuDEvfg+wltw9lhfnveYNwIGIOAu4E7hjQO/EzGwE29vcRlW59xAi4ufk7nP8Eelb/u8Aj/T1GpLmAVMj4sWICOAh4Jo0exXwYJp+Arhcxbr7g5nZMNR6vJPmtg5mF/EcBBh8H8KlQENEbM2LLZL0uqSfSbo0xaqBurxl6lKse95OgIjoAA4BswqtTNJaSTWSapqamgaZupnZ8LC3pQ2g/H0IJ3AdH947qAdOi4iLgD8CvidpKlDoG3+kn33N+3Aw4t6IWB4Ry6uqqgaRtpnZ8LG3JXcOQrELwoDv1CypAvgisKw7FhFtQFuaflXSu8DZ5PYI5uc1nw/sTtN1wAKgLr3mNHo5RGVmNhrtbU57CBnoVO7NrwObI+KDQ0GSqiSNTdNnkOs8fi8i6oFmSStT/8D1wNOp2TpgTZr+EvBC6mcwMzPyDxmVuQ9B0iPAi8A5kuok3ZBmreajncmfBt6U9Aa5DuIbI6L72/5NwD8CtcC7wLMpfh8wS1ItucNMtw7i/ZiZjTil6kM44SGjiLiul/h/KhB7ktww1ELL1wBLCsRbgWtPlIeZ2Wi1t6WdKZUVnDJubFHX4zOVzcwybm9LW9H7D8AFwcws83KXrShu/wG4IJiZZd7elvai9x+AC4KZWeaV4sJ24IJgZpZpxzu7OHj0uAuCmdlot6/7LOUp7kMwMxvVmtJZysW+0im4IJiZZVrD4VYA5k49pejrckEwM8uwxrSHMGeq9xDMzEa1hsOtSMW/bAW4IJiZZVpjcxuzJo1n3Njif1y7IJiZZVhTcytVU4rffwAuCGZmmdZwuI25Jeg/ABcEM7NMa2xuZU4JLmwHLghmZpnV2RU0NbeVZMgpuCCYmWXWviNtdAXZ2UOQdL+kRkkb82LfkLRL0vr0uCpv3m2SaiVtkXRFXnyZpA1p3l3pVppIqpT0WIq/JGnh0L5FM7PhqfFw9zkI2dlDeAC4skD8zohYmh7PAEg6n9ytNS9Ibe7uvscycA+wltx9lhfnveYNwIGIOAu4E7hjgO/FzGxEaWzOnaWcmT2EiPg5sP9EyyWrgEcjoi0itpG7f/IKSfOAqRHxYkQE8BBwTV6bB9P0E8Dl3XsPZmajWfcewnDoQ7hF0pvpkNKMFKsGduYtU5di1Wm6Z/xDbSKiAzgEzCq0QklrJdVIqmlqahpE6mZm2deQCkIpzlKGgReEe4AzgaVAPfCtFC/0zT76iPfV5qPBiHsjYnlELK+qqjq5jM3MhpnG5lZmThrP+IrSjP8Z0FoioiEiOiOiC/gOsCLNqgMW5C06H9id4vMLxD/URlIFMI3+H6IyMxuxGg63laz/AAZYEFKfQLcvAN0jkNYBq9PIoUXkOo9fjoh6oFnSytQ/cD3wdF6bNWn6S8ALqZ/BzGxU23P4GPOmla
b/AKDiRAtIegS4DJgtqQ74OnCZpKXkDu1sB74CEBFvSXoceBvoAG6OiM70UjeRG7E0AXg2PQDuAx6WVEtuz2D1ULwxM7Phbs+hVj5ePb1k6zthQYiI6wqE7+tj+duB2wvEa4AlBeKtwLUnysPMbDRp6+hkb0t7SfcQfKaymVkGdQ85PdUFwcxsdKs/lDspzXsIZmajXP2hY4ALgpnZqLcn7SGcOm1CydbpgmBmlkH1h1qZUlnB5MoTjv0ZMi4IZmYZVH/oWEk7lMEFwcwsk/YcanVBMDOz3CGjUnYogwuCmVnmHO/soqmlraQdyuCCYGaWOY3NbUSUdsgpuCCYmWVO/cHcOQjuQzAzG+V2pYIwf7oPGZmZjWp1B3IFoXqGC4KZ2ai26+AxZkwcx8TxpTspDVwQzMwyZ9eBYyXfOwAXBDOzzKk7cJTqEvcfQD8KgqT7JTVK2pgX+6akzZLelPSUpOkpvlDSMUnr0+Mf8tosk7RBUq2ku9KtNEm323wsxV+StHDo36aZ2fAQEew6eIzq6RNLvu7+7CE8AFzZI/YcsCQiPgG8A9yWN+/diFiaHjfmxe8B1pK7z/LivNe8ATgQEWcBdwJ3nPS7MDMbIfYfaaf1eBfzs3jIKCJ+Tu5ex/mxH0VER3r6S2B+X68haR4wNSJejIgAHgKuSbNXAQ+m6SeAy7v3HszMRpvuIafDtQ/hD4Bn854vkvS6pJ9JujTFqoG6vGXqUqx73k6AVGQOAbMKrUjSWkk1kmqampqGIHUzs2zZ1T3kNIt9CH2R9DWgA/huCtUDp0XERcAfAd+TNBUo9I0/ul+mj3kfDkbcGxHLI2J5VVXVYFI3M8ukD05KK8MewoAHuUpaA/wmcHk6DEREtAFtafpVSe8CZ5PbI8g/rDQf2J2m64AFQJ2kCmAaPQ5RmZmNFnUHjjFp/FimTRhX8nUPaA9B0pXAnwJXR8TRvHiVpLFp+gxyncfvRUQ90CxpZeofuB54OjVbB6xJ018CXuguMGZmo82ug7lzEMrRlXrCPQRJjwCXAbMl1QFfJzeqqBJ4LiX9yzSi6NPAX0jqADqBGyOi+9v+TeRGLE0g1+fQ3e9wH/CwpFpyewarh+SdmZkNQ3UHjjF/RumHnEI/CkJEXFcgfF8vyz4JPNnLvBpgSYF4K3DtifIwMxvpIoId+47wyUUzy7J+n6lsZpYR+4+0c6S9k9NmlmcPwQXBzCwjduzPdcm6IJiZjXLdBeH0WS4IZmaj2o59uYJQrk5lFwQzs4zYsf8oc6ZUMmH82LKs3wXBzCwjduw/WrbDReCCYGaWGTv2H2VBmTqUwQXBzCwTWo93sudwa9lGGIELgplZJuw6eIyI8o0wAhcEM7NM6B5h5D0EM7NR7r29RwBYNHty2XJwQTAzy4Bte1uYNmEcMyaW/rLX3VwQzMwyYNveIyyaPaksl73u5oJgZpYB25qOcMbsSWXNwQXBzKzMjrV3svtQK4tcEMzMRrft+1KHclXGC4Kk+yU1StqYF5sp6TlJW9PPGXnzbpNUK2mLpCvy4sskbUjz7kq30kRSpaTHUvwlSQuH9i2amWXb9g9GGGW8IJC77eWVPWK3As9HxGLg+fQcSeeTuwXmBanN3d33WAbuAdaSu8/y4rzXvAE4EBFnAXcCdwz0zZiZDUfdQ04Xzsp4QYiIn5O713G+VcCDafpB4Jq8+KMR0RYR24BaYIWkecDUiHgxIgJ4qEeb7td6Arhc5exmNzMrsW17jzB3aiWTKk94V+OiGmgfwtyIqAdIP+ekeDWwM2+5uhSrTtM94x9qExEdwCFgVqGVSlorqUZSTVNT0wBTNzPLlveaWsp+uAiGvlO50Df76CPeV5uPBiPujYjlEbG8qqpqgCmamWVHRFDb2MJZc8p3hnK3gRaEhnQYiPSzMcXrgAV5y80Hdqf4/ALxD7WRVAFM46OHqMzMRqSmljYOt3aweM6Ucqcy4IKwDliTptcAT+fFV6eRQ4vIdR6/nA4rNUtamfoHru/Rpvu1vgS8kPoZzMxGvNqGFgAWZ2AP4YQ9GJIeAS4DZkuqA74O/BXwuKQbgB3AtQAR8Zakx4G3gQ7g5ojoTC91E7kRSxOAZ9MD4D7gYUm15PYMVg/JOzMzGwa2NuYKwllzh0FBiIjrepl1eS/L3w7cXiBeAywpEG8lFRQzs9Fma2MzU0+poGpyZblT8ZnKZmbltLWhhcVzp5T1onbdXBDMzMqotrElE/0H4IJgZlY2+1ra2HekPRNDTsEFwcysbGpTh/LiueUfcgouCGZmZfNOQzOQjSGn4IJgZlY2b9c3M23COOZNO6XcqQAuCGZmZbN5z2HOm5eNEUbggmBmVhZdXcGWPc2ce+rUcqfyARcEM7My2LH/KEfbOzlvXjY6lMEFwcysLDbvOQzAefO8h2BmNqptqm9mjMjEVU67uSCYmZXBpvrDLJw9iQnjx5544RJxQTAzK4NNew5zXoY6lMEFwcys5A4ebWfn/mMsqZ5W7lQ+xAXBzKzENu7KdSh/fKQUBEnnSFqf9zgs6auSviFpV178qrw2t0mqlbRF0hV58WWSNqR5dykrZ2mYmRXBhl2HAFhSPUIOGUXElohYGhFLgWXAUeCpNPvO7nkR8QyApPPJ3Q3tAuBK4G5J3b0p9wBryd1yc3Gab2Y2Im3cdYgFMycwfeL4cqfyIUN1yOhy4N2IeL+PZVYBj0ZEW0RsA2qBFZLmAVMj4sV0L+WHgGuGKC8zs8x5c9fBzB0ugqErCKuBR/Ke3yLpTUn3S5qRYtXAzrxl6lKsOk33jJuZjThZ7VCGISgIksYDVwP/N4XuAc4ElgL1wLe6Fy3QPPqIF1rXWkk1kmqampoGlbeZWTl0dyh/onp6mTP5qKHYQ/g88FpENABERENEdEZEF/AdYEVarg5YkNduPrA7xecXiH9ERNwbEcsjYnlVVdUQpG5mVlpv1B0EstehDENTEK4j73BR6hPo9gVgY5peB6yWVClpEbnO45cjoh5olrQyjS66Hnh6CPIyM8uc13cc4IyqSZnrUAaoGExjSROBzwFfyQv/taSl5A77bO+eFxFvSXoceBvoAG6OiM7U5ibgAWAC8Gx6mJmNKBHBazsO8tlz55Q7lYIGVRAi4igwq0fsy30sfztwe4F4DbBkMLmYmWXd+/uOsv9IOxefNuPEC5eBz1Q2MyuR13ceAOCi07LXoQwuCGZmJfPa+weZXFnB2XOzc8nrfC4IZmYl8tqOA1y4YBpjx2Tz6jwuCGZmJdDS1sGm+sOZ7T8AFwQzs5Ko2b6froBPLpp14oXLxAXBzKwEXt62n4ox4uLTs9mhDC4IZmYl8dK2/Xx8/jQmjh/UaP+ickEwMyuyY+2dvFl3kBWLZpY7lT65IJiZFdnrOw5wvDNYmeH+A3BBMDMrul++t48xgmULszvCCFwQzMyK7he1e7lwwXSmnjKu3Kn0yQXBzKyIDh09zhs7D3LpWbPLncoJuSCYmRXRi+/tpSvgVxdn/x4uLghmZkX0i617mTR+bGYvaJfPBcHMrIj+rXYvK8+Yxbix2f+4z
X6GZmbD1HtNLby/7yifPjv7h4tgkAVB0nZJGyStl1STYjMlPSdpa/o5I2/52yTVStoi6Yq8+LL0OrWS7kq30jQzG9ae39QIkNk7pPU0FHsIn4mIpRGxPD2/FXg+IhYDz6fnSDofWA1cAFwJ3C1pbGpzD7CW3H2WF6f5ZmbD2vObGzhn7hQWzJxY7lT6pRiHjFYBD6bpB4Fr8uKPRkRbRGwDaoEVkuYBUyPixYgI4KG8NmZmw9KhY8d5ZfsBLj9veOwdwOALQgA/kvSqpLUpNjci6gHSz+6tUQ3szGtbl2LVabpn/CMkrZVUI6mmqalpkKmbmRXPz95porMrhlVBGOxl9z4VEbslzQGek7S5j2UL9QtEH/GPBiPuBe4FWL58ecFlzMyy4Edv7WHWpPEsXZDty1XkG9QeQkTsTj8bgaeAFUBDOgxE+tmYFq8DFuQ1nw/sTvH5BeJmZsNS6/FOXtjcyBVLTs3s7TILGXBBkDRJ0pTuaeA3gI3AOmBNWmwN8HSaXgesllQpaRG5zuOX02GlZkkr0+ii6/PamJkNOz/d0sTR9k6uWjKv3KmclMEcMpoLPJVGiFYA34uIf5X0CvC4pBuAHcC1ABHxlqTHgbeBDuDmiOhMr3UT8AAwAXg2PczMhqVnN9YzY+I4Vp6R7fsf9DTgghAR7wEXFojvAy7vpc3twO0F4jXAkoHmYmaWFa3HO3l+UyO/+Yl5VAyDs5PzDa9szcwy7sebGmhp6+C3LvxYuVM5aS4IZmZD6Puv7WLetFNYeUa2745WiAuCmdkQ2dvSxs/eaWLV0uphNbqomwuCmdkQWbd+N51dwRcvLnhubea5IJiZDYGI4LFXdvLx6mmcPXdKudMZEBcEM7Mh8Or7B9jS0MzvffK0cqcyYC4IZmZD4Hsv7WByZQVXD8PRRd1cEMzMBunAkXb+ZUM9q5Z+jEmVg71EXPm4IJiZDdL3Xt5BW0cX11+ysNypDIoLgpnZILR1dPLAv2/n0sWzOefU4dmZ3M0FwcxsEH7wRj1NzW3850vPKHcqg+aCYGY2QF1dwT0/e5dz5k7h0sWzy53OoLkgmJkN0LMb91Db2MItnz2LdOXnYc0FwcxsALq6gr99YStnVk3iqo8Pr/se9MYFwcxsAP5lQz2b9zTzXz+7eFhet6gQFwQzs5PU3tHFN3+4hXNPnTIsL3Pdm8HcQnOBpJ9I2iTpLUl/mOLfkLRL0vr0uCqvzW2SaiVtkXRFXnyZpA1p3l0aCQfjzGzE+t5L77Nj/1Fu/fy5I2bvAAZ3C80O4I8j4rV0b+VXJT2X5t0ZEf87f2FJ5wOrgQuAjwE/lnR2uo3mPcBa4JfAM8CV+DaaZpZB+1rauPPHW/nVs2bza2dXlTudITXgPYSIqI+I19J0M7AJ6Ouar6uARyOiLSK2AbXACknzgKkR8WJEBPAQcM1A8zIzK6Y7/nUzR9o6+MbV54+IkUX5hqQPQdJC4CLgpRS6RdKbku6XNCPFqoGdec3qUqw6TfeMF1rPWkk1kmqampqGInUzs357edt+Hq+p44ZLF3HWnOF9VnIhgy4IkiYDTwJfjYjD5A7/nAksBeqBb3UvWqB59BH/aDDi3ohYHhHLq6pG1q6amWXbsfZO/scTb7Bg5gT+22cXlzudohhUQZA0jlwx+G5EfB8gIhoiojMiuoDvACvS4nXAgrzm84HdKT6/QNzMLDO++cMtbN93lDt++xPD+oqmfRnMKCMB9wGbIuLbefH8MzS+AGxM0+uA1ZIqJS0CFgMvR0Q90CxpZXrN64GnB5qXmdlQ+8mWRu7/f9u4/pLT+ZUzh/8lKnozmDL3KeDLwAZJ61Psz4DrJC0ld9hnO/AVgIh4S9LjwNvkRijdnEYYAdwEPABMIDe6yCOMzCwT9hxq5U8ef4NzT53Cn111XrnTKaoBF4SI+DcKH/9/po82twO3F4jXAEsGmouZWTG0Hu/kK//0Kq3HO/m737uIU8aNLXdKRTUyD4SZmQ1SRHDb9zfwxs6D/J8vLxuRo4p68qUrzMwK+OYPt/DU67v448+dzRUXnFrudErCBcHMrIe//0ktd//0Xa5bcRq3fPascqdTMj5kZGaWRATffu4d/vaFWq6+8GP85aoLRtzZyH1xQTAzI1cM/tczm/jOL7bxu8sX8L+++PERdeG6/nBBMLNR72h7B7c+uYF1b+xmzSWn8/XfuoAxo6wYgAuCmY1y2/ce4cZ/epUtDc389yvO4b9cduaoOkyUzwXBzEaliOAHb9bztac2MGaMeOD3V4y4y1mfLBcEMxt16g8d43/+80Z+vKmRC+dP4+9+72IWzJxY7rTKzgXBzEaN1uOdPPzi+/zN81vp6Oria1edx+9/aiEVYz0CH1wQzGwUON7ZxROv1vE3P97KnsOt/NrZVfzFqgs4fdakcqeWKS4IZjZiHTjSziOv7ODhF9+n/lArF502nTt/dymXnDmr3KllkguCmY0oHZ1d/Pu7+/jn9bt4ZkM9rce7+NRZs7j9C0v4zDlzRu0Iov5wQTCzYe9oewf/XruPn2xp5IdvNbC3pY2pp1TwxYvns+aShZxz6si/MN1QcEEws2Hn4NF2Xt9xkFffP/DBo72zi0njx/Jr51Rx9YXVfObcKiorRvblqoeaC4KZZVZHZxfb9x1la0Mz7zS08E5jM5vqD/Ne0xEAxo4R58+byvWXnM5nzp3D8oUzXAQGITMFQdKVwN8AY4F/jIi/KnNKZlYkEcGR9k72tbSxt6WNvS3t7G1po+FwG7sOHKPuwFHqDhxjz+FWOrsCAAkWzJjI2XMn89sXz2fZ6TP4xPxpTByfmY+xYS8TW1LSWODvgc8BdcArktZFxNvlzcxs5IoIOruCjq6gK01/8Mh73tUFHV1ddEXQ1tGVexzvoq2jM+953nRHJ63Hu2hp7aCl7TgtbR00t+YeLW0dtLR2cPBYO63Huz6SkwSnTj2F+TMmsGLRTKqnT2DR7EmcPXcKZ86Z5A//IsvK1l0B1EbEewCSHgVWkbv/8pB6/JWd3PuL9wrOi4he2/U6p/cmfc0a2LqA3ppFH636WFWf8/rSW/4DyT3XboD5D6BNX1kOZF25dkO8PQb499HbzIAPPvS7IlcEBvq776/JlRW5xykVTEmPj00/hSmV45g6oYLZkyuZNbmS2ZPHM3tyJbMnVzJz0njGV/gksXLJSkGoBnbmPa8DPtlzIUlrgbUAp5122oBWNGPSeM6Z28eIgz5GpPU2q69hbH0NcOtr9Fvf7QrP7XMwXZ/r6iP/AW2PIqxrAG9u4Nu3r3Ynn/9AfpcnMpAcxyh3zP1DD4kxY0RFj/gY5WJj0jIVY3Ox8RVjqKwYQ2XFWCrH5U1XjEnPc9Pjx44ZlVcLHe6yUhAK/eV85PtLRNwL3AuwfPnyAX2/+dz5c/nc+XMH0tTMbETLyr5ZHbAg7/l8YHeZcjEzG5WyUhBeARZLWiRpPLAaWFfmnMzMRpVMHDKKiA5JtwA/JDfs9P6IeKvMaZmZjSqZKAgAEfEM8Ey58zAzG62ycsjIzMzKzAXBzMwAFwQzM0tcEMzM
DAD1dYp8lklqAt4fYPPZwN4hTGcoZTU353VynNfJy2puIy2v0yOiqtCMYVsQBkNSTUQsL3cehWQ1N+d1cpzXyctqbqMpLx8yMjMzwAXBzMyS0VoQ7i13An3Iam7O6+Q4r5OX1dxGTV6jsg/BzMw+arTuIZiZWQ8uCGZmBozggiDpWklvSeqStLzHvNsk1UraIumKXtrPlPScpK3p54wi5PiYpPXpsV3S+l6W2y5pQ1quZqjz6GWd35C0Ky+/q3pZ7sq0HWsl3VqCvL4pabOkNyU9JWl6L8uVZJud6P0r5640/01JFxcrl7x1LpD0E0mb0v/AHxZY5jJJh/J+v39e7Lzy1t3n76ZM2+ycvG2xXtJhSV/tsUxJtpmk+yU1StqYF+vX59Gg/x8jYkQ+gPOAc4CfAsvz4ucDbwCVwCLgXWBsgfZ/Ddyapm8F7ihyvt8C/ryXeduB2SXeft8A/uQEy4xN2+8MYHzarucXOa/fACrS9B29/V5Ksc368/6Bq4Bnyd0VcCXwUgl+d/OAi9P0FOCdAnldBvyglH9T/f3dlGObFfi97iF3AlfJtxnwaeBiYGNe7ISfR0Px/zhi9xAiYlNEbCkwaxXwaES0RcQ2oBZY0ctyD6bpB4FripNp7hsR8DvAI8VaR5GsAGoj4r2IaAceJbfdiiYifhQRHenpL8ndXa9c+vP+VwEPRc4vgemS5hUzqYioj4jX0nQzsIncfcuHi5Jvsx4uB96NiIFeCWFQIuLnwP4e4f58Hg36/3HEFoQ+VAM7857XUfifZW5E1EPuHwyYU8ScLgUaImJrL/MD+JGkVyWtLWIePd2Sdtnv72UXtb/bslj+gNw3yUJKsc368/7Luo0kLQQuAl4qMPsSSW9IelbSBaXKiRP/bsr9d7Wa3r+clWub9efzaNDbLTM3yBkIST8GTi0w62sR8XRvzQrEijb2tjupAxwAAAKESURBVJ85XkffewefiojdkuYAz0nanL5FFC034B7gL8ltm78kd0jrD3q+RIG2g96W/dlmkr4GdADf7eVlirLNeqZaINbz/Zf07+1DK5YmA08CX42Iwz1mv0bukEhL6h/6Z2BxKfLixL+bcm6z8cDVwG0FZpdzm/XHoLfbsC4IEfHrA2hWByzIez4f2F1guQZJ8yKiPu2uNhYjR0kVwBeBZX28xu70s1HSU+R2DQf94dbf7SfpO8APCszq77Yc0rwkrQF+E7g80sHTAq9RlG3WQ3/ef1G20YlIGkeuGHw3Ir7fc35+gYiIZyTdLWl2RBT9Im79+N2UZZslnwdei4iGnjPKuc3o3+fRoLfbaDxktA5YLalS0iJyFf7lXpZbk6bXAL3tcQzWrwObI6Ku0ExJkyRN6Z4m16m6sdCyQ6nHMdsv9LLOV4DFkhalb1aryW23YuZ1JfCnwNURcbSXZUq1zfrz/tcB16eRMyuBQ927/sWS+qTuAzZFxLd7WebUtBySVpD7LNhXzLzSuvrzuyn5NsvT6956ubZZ0p/Po8H/Pxa7x7xcD3IfYnVAG9AA/DBv3tfI9cZvAT6fF/9H0ogkYBbwPLA1/ZxZpDwfAG7sEfsY8EyaPoPcaIE3gLfIHTYpxfZ7GNgAvJn+qOb1zC09v4rcKJZ3S5EbuUEAO4H16fEP5dxmhd4/cGP375Tcbvzfp/kbyBvxVsScfpXcoYI387bTVT3yuiVtmzfIdc7/Son+rgr+bsq9zdJ6J5L7gJ+WFyv5NiNXkOqB4+kz7IbePo+G+v/Rl64wMzNgdB4yMjOzAlwQzMwMcEEwM7PEBcHMzAAXBDMzS1wQzMwMcEEwM7Pk/wNYtumuXL72iQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "y = x.exp()\n", "pl.plot(x.numpy(), y.numpy())" @@ -1427,32 +789,9 @@ }, { "cell_type": "code", - "execution_count": 77, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 77, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD4CAYAAADxeG0DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAb8ElEQVR4nO3deXBc1YHv8e+RZMnWvi/WYkl4lXcjGzuEJZjFDCZAJpMAIaFIMoTUI2SfrCSVWVKZN6mQvGIyGbMMhLAECITAEMAJJDbg3Rivkhfttlr72nJr6T7vjxbGAQO21erbt/v3qUpZaim3f7cs/Tg+95x7jbUWERFxrzinA4iIyMSoyEVEXE5FLiLicipyERGXU5GLiLhcghNvmpuba8vLy514axER19qxY0entTbvna87UuTl5eVs377dibcWEXEtY0zjqV7X1IqIiMupyEVEXE5FLiLicipyERGXU5GLiLicilxExOVU5CIiLufIOnIRkVhgraXbO0JDl5f6ziEau7x8orqU0uzkkL6PilxEZAKstfQMjVLf6aWxy0tDp5f6riEaOr00dHkZ8I2d+N74OMOysiwVuYiIE/p9o9R1eKnvHKS+M1jUjV1e6ju99J9U1nEGirOmUZ6TwnVlxZTnpFCem0x5TgolWckkJoR+RltFLiIybswfoKXnOHWdg9R1eDnSMciRDi91HV46B4dPfJ8xUJw5jYrcFK5ZUsyMnGQqclMoz02hdJLK+v2oyEUk5vQOjYwX9CB1ncE/j3QER9ij/rcff5mVPIXKvFQumZtHZV4qlbkpVOalUJqdTFJCvINn8LdU5CISlUb9AZq7h06MrOs6vCdG2l3ekRPfNyXeUJadTGVeKpfOK6AyL4Vz8lKozE0lKyXRwTM4fSpyEXG14TE/9Z1eDrUNcqh9kMPtAxxqG6S+08tY4O3RdW5qIpW5qVxW9VZZp1KZl0pp1jQS4t29EltFLiKu4Bv1U9fh5dB4UR9qH+BQ+yCNXUP4xws7zsCMnBRm5qdyaVXBeFmncE5uKhnJUxw+g8mjIheRiHJ8xM+RjmBRH2wb5FBbcJTd1D3EWwPs+DjDjJxkZuencdXCImbmpzIrP43KvBSmTomcuetwUZGLiCPG/AEauoao9QxQ4+mnxjNArWeA5p4h7HhhJ8QZKnJTqJqezkeXFDO7IFjY5bmRdbHRaSpyEZl0HQPDf1PYNZ5+DrUNMjwWAIJTIhW5KSwszuDvl5UwqyCVWfmplOemMMXl89fhMOEiN8aUAr8GCoEAsM5a+4uJHldE3Mc36udQ2yAHPP0nirvWM0Dn4NurRHJTk5hXlManV85gblE6cwvTmJmfGpNTIqESihH5GPB1a+1OY0wasMMYs95auz8ExxaRCGStpa1/mH3H+th/rJ8D4yPthk7viXnsqVPimF2Qxkfm5J8o7DmFaeSmJjkbPgpNuMitta1A6/jHA8aYA0AxoCIXiQKBgKWhy8u+Y/3j/wuW98lrsWfkJDO3MI21i6Yzb7ywZ+SkEB9nHEweO0I6R26MKQeWAltO8bVbgVsBysrKQvm2IhIiw2PBqZH944W971g/B1r78Y74geDmmVn5aVwyN5/509Opmp7BvKI00qZG79I+NwhZkRtjUoHfAV+x1va/8+vW2nXAOoDq6mr7zq+LSHj5Rv3UeAbY3dLLnpY+9h7r53D7wIkt6imJ8cwrSufj55Ywf3oGVdPTmVWQqtUiESgkRW6MmUKwxB+21j4VimOKSOiM+gMcahtkd0svu4/2sbull1rP26Wdk5LI/OIMLp6TR1VROvOnp1Oek0KcpkZcIRSrVgxwH3DAWvuziUcSkYkIBCx1nd5gabcES3vfsf4TS/3SpiawqCSDz19QyaLiDBaVZjI9YyrBX2Vxo1CMyM8HPg3sMcbsGn/tu9ba50NwbBH5AK19x9nZ2MubLb3sbull79F+BoeD98eeNiWeBcXp3LRyBotKMlhUksmM7GSNtKNMKFatvArop0IkDHyjfvYd6+eNph52NvWws7EXT78PgMT4OOYVpXHd0mIWlmSwuCSTmfmpWjkSA7SzUyRCWWs51ucLlnZjLzubeth/rJ8Rf3CKpDhzGssrsllWlsmysizmFaWH/YEGEhlU5CIRIjja7jtR2jubemjrDz6VJikhjsUlmdxyfjlLy7JYVpZJfvpUhxNLpFCRizik7/goOxq72VLfzbb6bvYc7TuxiqQ0exorK3NYVpbF0rJM5hWl654j8p5U5CJh0tbvY2t9N9sautla301t2wDWBjfZLCzO4LMfruDcsiyWlmWRl6Zt7HL6VOQik8BaS0PXENvqu9naECzvxq4hAJIT4zl3RhZ/t7CI5eXZLC3L1A2jZEJU5CIh0tw9xKYjXbx+pJPXj3TRPhCc385OSaR6RhafXjmDFRXZVBWlu/7RYhJZVOQiZ8nT52NTXed4eXfR0nMcCN6m9UPn5HBeZTbnVWRzTl6qNtvIpFKRi5ymrsFhNtd18/qRTjbVdVHX4QUgY9oUVlXmcOuFlayqzGFmvopbwktFLvIefKN+tjV0s/FQJxsOdlDjGQCCN5M6rzKHG1eUsbIyh6qidO2UFEepyEXGWWs50jHIXw8Gi3tLfRe+0QCJ8XFUl2fxzSvmsOqcHBYWZ2gpoEQUFbnEtL6hUV47EizuDQc7ONYX3O5emZvC9cvLuHB2Lisrc0hO1K+KRC79dEpMCQQsu4/28ZfadjYc7GBXcy8BC2lJCZw/M5fbL8njglm5lGYnOx1V5LSpyCXqeYfH2Hiok5dr2ni5poPOwWGMgUUlmdz+kZlcODuPJaWZWhIorqUil6jU3D3EyzXt/Lmmnc1HuhjxB0ibmsDFc/JZPTefC2fnkZ2S6HRMkZBQkUtUsNayu6WPF/d5+POBdmrbgitMKnNTuPlDM7hkbgHV5Vm6SClRSUUurjXmD7CtoYcX93l4aZ+HY30+4uMMK8qz+f5V87hkbj6VealOxxSZdCpycRXfqJ/XDnfy4j4P6/e30TM0SlJCHBfOzuPrl89
h9bx8MpM1ZSKxRUUuEc836ucvte08t7uVV2ra8Y74SUtKYPW8fK6YX8hFc/K0PFBimn76JSKNjAXYeKiD53a38tI+D94RP7mpiXx0STFrFhSyqjJHT8MRGacil4gx5g+wqa6LZ988xgt7PfT7xsiYNoWrF0/n6sXTOa8iW0sERU5BRS6OstayvbGHZ3Yd5Y97PHR5R0hNSuDyqgKuXjyd82fmauQt8gFU5OKI5u4hfrezhad2HqWpe4ipU+K4dF4BaxdN5+I5eXrQgsgZUJFL2Az4RvnjHg9P7mxha303xsCqyhy+vHoWaxYUkpKkH0eRs6HfHJlU/oDltcOd/G5nCy/u8+AbDVCZm8I3r5jDtUuLKc6c5nREEddTkcukaO07zmNbm3l8ezOtfT4ypk3h4+eW8LFlJSwtzdSDF0RCSEUuIeMPWP56sJ1HtjTxck07FrhgVh53rq1i9bx8khI07y0yGVTkMmGePh+/3dbMb7c1cazPR15aEl+8+ByuX16m28GKhIGKXM5KIGD566GOE6Nvf8Bywaxc7lxbxaVVBbo5lUgYqcjljAz4RnlyRwsPvt5AQ9cQuamJ3HphJTcsL6MsR6NvESeoyOW0NHZ5eeD1Bp7Y3sLg8BjLyjL52uVzWDO/UBt2RBymIpf3ZK1lS30392yo4+XaduKNYe2iIm45v4LFpZlOxxORcSpyeRd/wPLSPg+/2lDHm8295KQk8qWPzORTK2dQkD7V6Xgi8g4qcjnBN+rnqZ1HuWdjHfWdXmbkJPOv1y7g4+eWaMu8SARTkQuDw2M8tKmR+16tp3NwmEUlGfzyU8u4Yn4h8XHauCMS6UJS5MaY+4G1QLu1dkEojimTb8A3yq83NXLPxjp6h0a5YFYuX7x4Casqc7TzUsRFQjUifwC4G/h1iI4nk6jfN8oDrzVw36v19B0f5ZK5+dyxehZLdAFTxJVCUuTW2g3GmPJQHEsmz+DwGPdtrOfeV+sY8I1x6bxggS8qUYGLuFnY5siNMbcCtwKUlZWF620FGB7z88iWJu5++TBd3hEuqyrgy6tnsaA4w+loIhICYStya+06YB1AdXW1Ddf7xjJ/wPLMrqP8bP1BWnqOs6oyh39aM4elZVlORxORENKqlShkreWV2nb+7wu11HgGmD89nR9ft5ALZuXqIqZIFFKRR5mDbQP8y3P72Xiok/KcZO6+cSl/t6CIOC0jFIlaoVp++ChwMZBrjGkBfmitvS8Ux5bT0zs0wl3rD/KbLU2kJMbzw6uruGnlDN2FUCQGhGrVyg2hOI6cuTF/gEe2NvGz9QfpPz7Kp86bwVcvm012SqLT0UQkTDS14mJb67u58/d7qW0bYFVlDj/8aBVzC9OdjiUiYaYid6G+oVF+8sIBHt3aTHHmNH5107lcMb9AFzJFYpSK3EWstTy3u5UfPbufnqER/vGCCr562WySE/XXKBLL1AAu0dw9xJ3P7OUvtR0sLM7ggVuWa0OPiAAq8ogXCFh+vamBf3+hFmPgB2uruPlD5boroYicoCKPYK19x/nmE7t59XAnF8/J49+uW0hx5jSnY4lIhFGRR6hndh3lzt/vZdRv+fF1C7lhRakuZorIKanII0zv0Ah3PrOPZ988xtKyTO76xBLKc1OcjiUiEUxFHkFeO9zJ1x9/k87BYb5x+Wxuu+gcErQzU0Q+gIo8AgQClrtfOcxdfzpIZW4K93zmfBaWaEWKiJweFbnDerwjfPXxXfyltoNrl0znxx9bqHXhInJG1BgO2t3Syxd/s5OOgWH+5doF3HRemS5oisgZU5E7wFrLw1ua+Odn95OXlsQTt61isZ6XKSJnSUUeZr5RP999eg9P7TzKRbPz+Pknl5ClOxWKyASoyMOoY2CYLzy0nZ1NvXzl0lncccksPfBBRCZMRR4mB1r7+fyD2+nyDvNfn1rGlQuLnI4kIlFCRR4GGw91cNtDO0idmsATX/iQlhaKSEipyCfZ7984yjeeeJOZ+ak8cMsKCjOmOh1JRKKMinwSrdtwhB8/X8PKymzWfaaa9KlTnI4kIlFIRT4JrLX8x4u1/PIvR7hqURE/+8RikhLinY4lIlFKRR5i1lp+9Ox+Hni9gRvPK+Nfr1mglSkiMqlU5CHkD1i+9/QeHtvWzOc+XMH3r5qnnZoiMulU5CESCFi+89RuHt/ewpcumcnXLputEheRsFCRh4C1lh/8YS+Pb2/hjktm8rXL5zgdSURiiG52PUHWWv75uf38ZnMTt110Dl+9bLbTkUQkxqjIJ+iu9Qf5n9ca+Oz5FXxrzRxNp4hI2KnIJ+ChzY38v5cP84nqEu5cqwubIuIMFflZemFvKz94Zi+r5+bz4+sWqsRFxDEq8rOwraGbOx7bxdLSTO6+cZmeqykijlIDnaHm7iG+8NAOSjKncd/Ny5mWqB2bIuIsFfkZGBwe4/MPbmfMH+Dem6v1QAgRiQhaR36a/AHLlx99g8Mdgzx4ywoq81KdjiQiAmhEftruWn+QP9e088Orq/jwrFyn44iInKAiPw2v1LZz9yuH+WR1KZ9ZVe50HBGRvxGSIjfGrDHG1BpjDhtjvh2KY0aKY73H+dpvdzG3MI0fXTPf6TgiIu8y4SI3xsQD/wlcCVQBNxhjqiZ63Egw6g9w+yM7GRkL8MtPLWPqFK1QEZHIE4oR+QrgsLW2zlo7AjwGXBOC4zrupy/VsrOpl5/8/SJd3BSRiBWKIi8Gmk/6vGX8tb9hjLnVGLPdGLO9o6MjBG87ubbWd7NuQx03rCjj6sXTnY4jIvKeQlHkp9qbbt/1grXrrLXV1trqvLy8ELzt5BkcHuPrT+yiNCuZ7181z+k4IiLvKxTryFuA0pM+LwGOheC4jvm3/z1AS89xHv/CKlKStNReRCJbKEbk24BZxpgKY0wicD3whxAc1xGv1LTz6NYmbr2gkuXl2U7HERH5QBMeblprx4wxtwMvAvHA/dbafRNO5gDv8BjffXoPswtS9YAIEXGNkMwbWGufB54PxbGc9PM/HaS1z8fdN67SUkMRcQ3t7Bx3oLWf+19r4PrlpZw7Q1MqIuIeKnIgELB8//d7yZg2hW+tmet0HBGRM6IiB57c0cKOxh6+feVc3ZpWRFwn5ot8wDfKv79Qw/LyLD6+rMTpOCIiZyzmi/yeDXV0eUe4c20VcXF67qaIuE9MF3n7gI97NtazdlERi0oynY4jInJWYrrIf/GnQ4z6A3zj8jlORxEROWsxW+R1HYM8tq2ZG88rozw3xek4IiJnLWaL/Kcv1TI1IY47Vs9yOoqIyITEZJHXegZ4fo+Hz324gtzUJKfjiIhMSEwW+X//9QjJifHccn6F01FERCYs5oq8pWeIZ948xg0ryrT5R0SiQswV+b0b6zHA5z6s0biIRIeYKvKuwWEe29bEtUuLmZ45zek4IiIhEVNF/uCmRnyjAW67qNLpKCIiIRMzRX58xM+DrzdweVUBM/PTnI4jIhIyMVPkz+0+Rt/xUT6ruXERiTIxU+SPbWumMi+F8yr00AgRiS4xUeQH2wbY0djDDcvLMEZ3OBSR6BITRf
7o1iamxBs+tqzY6SgiIiEX9UXuG/Xz9BtHuWJ+ITnaji8iUSjqi/zFfR56h0a5YUWZ01FERCZF1Bf5I1uamJGTzKrKHKejiIhMiqgu8sYuL1vqu/nk8lI9xk1EolZUF/nzezwAXLNEFzlFJHpFdZG/sM/D4pIMinVfFRGJYlFb5Md6j/Nmcy9rFhQ5HUVEZFJFbZG/sDc4rbJmQaHDSUREJldUF/ncwjQq9GBlEYlyUVnk7QM+tjV2azQuIjEhKov8pX1tWAtXan5cRGJAVBb5C3s9VOamMLsg1ekoIiKTLuqKvHdohE11XVyxoFB3OhSRmBB1Rb7hUCf+gOXyqgKno4iIhMWEitwY8w/GmH3GmIAxpjpUoSZic10XaUkJLCzOcDqKiEhYTHREvhf4GLAhBFlCYvORLlZUZJMQH3X/2BAROaUJtZ219oC1tjZUYSaqrd9HXaeXlbrToYjEkLANW40xtxpjthtjtnd0dEzKe2yu6wJQkYtITEn4oG8wxvwJONXOmu9Za5853Tey1q4D1gFUV1fb0054BjbXdZE2NYGq6emTcXgRkYj0gUVurb00HEFCYXNdN+dVZBOve4+LSAyJmiuCnj4f9ZofF5EYNNHlh9cZY1qAVcD/GmNeDE2sM7elXvPjIhKbPnBq5f1Ya58Gng5RlgnZdKSL9KkJzCvS/LiIxJaomVrZXNfFiooczY+LSMyJiiJv7TtOQ9cQKyuznY4iIhJ2UVHkW+u7Ac2Pi0hsiooi39/aT2J8HHMK05yOIiISdlFR5DWtA5yTn8oU3V9FRGJQVDRfrWeAeRqNi0iMcn2R93hH8PT7NK0iIjHL9UVe4xkAYK7Wj4tIjHJ9kdd6+gE0tSIiMcv1RV7jGSAreQp5aUlORxERcURUFPncwnQ9aFlEYparizwQsBxsG9CFThGJaa4u8uaeIYZG/MwrUpGLSOxydZEfaB1fsVKoFSsiErtcXeS1ngGMgdkFGpGLSOxydZHXePopz0lhWmK801FERBzj8iIfYI5G4yIS41xb5MdH/DR0eZmrC50iEuNcW+QH2wawVhc6RURcW+S1b91jRWvIRSTGubbIG7q8JMQZSrOTnY4iIuIo1xa5p99HQfpUPWxZRGKea4u8rd9HfrpulCUi4uIiH6YwfarTMUREHOfeIu8LTq2IiMQ6Vxa5d3iMgeExFbmICC4t8rZ+HwCFGZojFxFxZZF7xotcI3IREZcWeZuKXETkBJcW+TCAVq2IiODSIvf0+UhLSiAlKcHpKCIijnNlkWszkIjI21xb5IUZmlYREQHXFvmwLnSKiIybUJEbY/7DGFNjjNltjHnaGJMZqmDvJRCwtPVrV6eIyFsmOiJfDyyw1i4CDgLfmXik99c9NMJYwGrFiojIuAkVubX2JWvt2Pinm4GSiUd6f54+rSEXETlZKOfIPwv88b2+aIy51Riz3RizvaOj46zf5O3NQFq1IiIC8IELsY0xfwIKT/Gl71lrnxn/nu8BY8DD73Uca+06YB1AdXW1Pau0nLQZSKtWRESA0yhya+2l7/d1Y8zNwFpgtbX2rAv6dHn6fRgDeakakYuIwGkU+fsxxqwBvgVcZK0dCk2k99fW5yM3NYmEeFeunBQRCbmJtuHdQBqw3hizyxjzqxBkel9tAz6tWBEROcmERuTW2pmhCnK6PH0+SrKSw/22IiIRy3XzE8HNQJofFxF5i6uK3Dfqp2doVFMrIiIncVWRdwwElx4WaOmhiMgJripyPeJNROTd3FXk49vzNbUiIvI2VxX5W9vzVeQiIm9zXZEnJcSRPk2PeBMReYurivycvFSuXVKMMcbpKCIiEcNVQ9vrV5Rx/Yoyp2OIiEQUV43IRUTk3VTkIiIupyIXEXE5FbmIiMupyEVEXE5FLiLicipyERGXU5GLiLicCcPzkt/9psZ0AI1n8H/JBTonKU4k03nHllg9b4jdcz/T855hrc1754uOFPmZMsZst9ZWO50j3HTesSVWzxti99xDdd6aWhERcTkVuYiIy7mlyNc5HcAhOu/YEqvnDbF77iE5b1fMkYuIyHtzy4hcRETeg4pcRMTlIrrIjTFrjDG1xpjDxphvO50nXIwxpcaYV4wxB4wx+4wxX3Y6UzgZY+KNMW8YY55zOku4GGMyjTFPGmNqxv/eVzmdKRyMMV8d/xnfa4x51BgTlQ/kNcbcb4xpN8bsPem1bGPMemPMofE/s872+BFb5MaYeOA/gSuBKuAGY0yVs6nCZgz4urV2HrAS+D8xdO4AXwYOOB0izH4BvGCtnQssJgbO3xhTDNwBVFtrFwDxwPXOppo0DwBr3vHat4E/W2tnAX8e//ysRGyRAyuAw9baOmvtCPAYcI3DmcLCWttqrd05/vEAwV/qYmdThYcxpgS4CrjX6SzhYoxJBy4E7gOw1o5Ya3udTRU2CcA0Y0wCkAwcczjPpLDWbgC63/HyNcCD4x8/CFx7tseP5CIvBppP+ryFGCmzkxljyoGlwBZnk4TNz4F/AgJOBwmjSqAD+J/xKaV7jTEpToeabNbao8BPgSagFeiz1r7kbKqwKrDWtkJw8Abkn+2BIrnIzSlei6m1ksaYVOB3wFestf1O55lsxpi1QLu1dofTWcIsAVgG/Je1dingZQL/zHaL8Tnha4AKYDqQYoy5ydlU7hTJRd4ClJ70eQlR+s+uUzHGTCFY4g9ba59yOk+YnA981BjTQHAq7RJjzG+cjRQWLUCLtfatf3U9SbDYo92lQL21tsNaOwo8BXzI4Uzh1GaMKQIY/7P9bA8UyUW+DZhljKkwxiQSvAjyB4czhYUxxhCcLz1grf2Z03nCxVr7HWttibW2nODf98vW2qgfoVlrPUCzMWbO+Eurgf0ORgqXJmClMSZ5/Gd+NTFwkfckfwBuHv/4ZuCZsz1QQkjiTAJr7Zgx5nbgRYJXs++31u5zOFa4nA98GthjjNk1/tp3rbXPO5hJJteXgIfHBy11wC0O55l01totxpgngZ0EV2q9QZRu1TfGPApcDOQaY1qAHwI/AR43xnyO4H/U/uGsj68t+iIi7hbJUysiInIaVOQiIi6nIhcRcTkVuYiIy6nIRURcTkUuIuJyKnIREZf7/zFu8n+yeXWjAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "y = torch.log(x)\n", "pl.plot(x.numpy(), y.numpy())" @@ -1468,20 +807,9 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "device(type='cpu')" - ] - }, - "execution_count": 79, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "my_device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n", "my_device" @@ -1497,20 +825,9 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([1., 1., 1., 1., 1.])" - ] - }, - "execution_count": 88, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# you can initialize a tensor in a specfic device\n", "torch.ones(5, device=my_device)" @@ -1518,22 +835,9 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[1., 0., 0.],\n", - " [0., 1., 0.],\n", - " [0., 0., 1.]])" - ] - }, - "execution_count": 91, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# you can move data to the GPU by doing .to(device)\n", "data = torch.eye(3) # data is on the cpu \n", @@ -1549,22 +853,9 @@ }, { "cell_type": "code", - "execution_count": 83, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[2., 0., 0.],\n", - " [0., 2., 0.],\n", - " [0., 0., 2.]])" - ] - }, - "execution_count": 83, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "res = data + data\n", "res" @@ -1572,20 +863,9 @@ }, { "cell_type": "code", - "execution_count": 87, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "device(type='cpu')" - ] - }, - "execution_count": 87, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# you can get a tensor's device via the .device attribute\n", "res.device" @@ -1615,17 +895,9 @@ }, { "cell_type": "code", - "execution_count": 140, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(2.)\n" - ] - } - ], + "outputs": [], "source": [ "x = torch.tensor(2.)\n", "print(x)" @@ -1633,17 +905,9 @@ }, { "cell_type": "code", - "execution_count": 156, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(2., requires_grad=True)\n" - ] - } - ], + "outputs": [], "source": [ "# setting requires_grad in directly via tensor's constructor\n", "x = torch.tensor(2., requires_grad=True)\n", @@ -1657,18 +921,9 @@ }, { "cell_type": "code", - "execution_count": 157, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "True\n", - "None\n" - ] - } - ], + "outputs": [], "source": [ "print(x.requires_grad)\n", "print(x.grad) # no gradient yet" @@ -1676,17 +931,9 @@ }, { "cell_type": "code", - "execution_count": 158, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Grad of x: None\n" - ] - } - ], + "outputs": [], "source": [ "# 
let's perform a simple operation on x\n", "y = x ** 2\n", @@ -1696,17 +943,9 @@ }, { "cell_type": "code", - "execution_count": 159, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Grad of y with respect to x: tensor(4.)\n" - ] - } - ], + "outputs": [], "source": [ "# if you want to compute the derivatives, you can call .backward() on a Tensor\n", "y.backward()\n", @@ -1722,21 +961,9 @@ }, { "cell_type": "code", - "execution_count": 179, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor(2., requires_grad=True)\n", - "tensor(4., grad_fn=)\n", - "tensor(4.)\n", - "tensor(218.3926, grad_fn=)\n", - "tensor(873.5704)\n" - ] - } - ], + "outputs": [], "source": [ "x = torch.tensor(2., requires_grad=True)\n", "print(x)\n", @@ -1763,20 +990,9 @@ }, { "cell_type": "code", - "execution_count": 177, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "x: tensor(2., requires_grad=True)\n", - "y: tensor(4., grad_fn=)\n", - "x: tensor(2., requires_grad=True)\n", - "y: tensor(8.)\n" - ] - } - ], + "outputs": [], "source": [ "x = torch.tensor(2.)\n", "x.requires_grad = True\n", @@ -1813,7 +1029,7 @@ }, { "cell_type": "code", - "execution_count": 137, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1823,30 +1039,9 @@ }, { "cell_type": "code", - "execution_count": 138, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([1.0000, 1.1000, 1.2000, 1.3000, 1.4000, 1.5000, 1.6000, 1.7000, 1.8000,\n", - " 1.9000, 2.0000, 2.1000, 2.2000, 2.3000, 2.4000, 2.5000, 2.6000, 2.7000,\n", - " 2.8000, 2.9000, 3.0000, 3.1000, 3.2000, 3.3000, 3.4000, 3.5000, 3.6000,\n", - " 3.7000, 3.8000, 3.9000, 4.0000, 4.1000, 4.2000, 4.3000, 4.4000, 4.5000,\n", - " 4.6000, 4.7000, 4.8000, 4.9000, 5.0000, 5.1000, 5.2000, 5.3000, 5.4000,\n", - " 5.5000, 5.6000, 5.7000, 5.8000, 5.9000, 6.0000, 6.1000, 6.2000, 6.3000,\n", - " 6.4000, 6.5000, 6.6000, 6.7000, 6.8000, 6.9000, 7.0000, 7.1000, 7.2000,\n", - " 7.3000, 7.4000, 7.5000, 7.6000, 7.7000, 7.8000, 7.9000, 8.0000, 8.1000,\n", - " 8.2000, 8.3000, 8.4000, 8.5000, 8.6000, 8.7000, 8.8000, 8.9000, 9.0000,\n", - " 9.1000, 9.2000, 9.3000, 9.4000, 9.5000, 9.6000, 9.7000, 9.8000, 9.9000],\n", - " requires_grad=True)" - ] - }, - "execution_count": 138, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "x = torch.arange(1, 10, 0.1, dtype=torch.float, requires_grad=True)\n", "x" @@ -1854,7 +1049,7 @@ }, { "cell_type": "code", - "execution_count": 139, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1864,71 +1059,20 @@ }, { "cell_type": "code", - "execution_count": 135, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([ 8.4347e-01, 9.6745e-01, 7.0215e-01, -1.5371e-01, -1.5448e+00,\n", - " -2.8139e+00, -2.5935e+00, 3.4371e-01, 4.9029e+00, 6.1208e+00,\n", - " -7.1154e-01, -9.6026e+00, -4.3225e+00, 1.2014e+01, 5.0618e+00,\n", - " -1.7083e+01, 5.3129e+00, 1.4884e+01, -2.4178e+01, 1.9542e+01,\n", - " -8.3407e+00, -2.0807e+00, 8.0647e+00, -7.7079e+00, -1.1458e+00,\n", - " 2.0335e+01, -4.4317e+01, 4.9858e+01, -6.3655e+00, -5.7770e+01,\n", - " 2.6313e+01, 6.9782e+01, 1.9344e+01, -4.6108e+01, -8.0576e+01,\n", - " -9.1352e+01, -9.6715e+01, -1.0142e+02, -8.7320e+01, -1.8511e+01,\n", - " 9.4970e+01, 
9.7035e+01, -9.6455e+01, -4.8176e+01, 1.3684e+02,\n", - " -1.5338e+02, 1.5415e+02, -1.6741e+02, 1.6771e+02, -7.1524e+01,\n", - " -1.3881e+02, 1.4273e+02, 1.9093e+02, 6.2615e+01, -4.0747e+01,\n", - " -6.2126e+01, 9.7839e+00, 1.7279e+02, 2.5614e+02, -5.8047e+01,\n", - " -2.4144e+02, 2.8856e+02, -2.5302e+02, 2.7245e+02, -3.2850e+02,\n", - " 2.1105e+02, 2.3265e+02, -1.9564e+02, -3.6942e+02, -3.7990e+02,\n", - " -3.9796e+02, -3.5900e+02, 7.7348e+00, 4.3732e+02, -2.2126e+02,\n", - " -2.6230e+01, 5.5592e+01, 1.6339e+02, -4.8904e+02, 1.6288e+02,\n", - " 5.2793e+02, 5.0286e+02, 5.1288e+02, 5.7520e+02, 2.1343e+02,\n", - " -5.8578e+02, 2.3057e+02, -2.5720e+01, 1.8482e+02, -6.0595e+02])" - ] - }, - "execution_count": 135, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "x.grad" ] }, { "cell_type": "code", - "execution_count": 134, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "data": { - "text/plain": [ - "[]" - ] - }, - "execution_count": 134, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD4CAYAAAAEhuazAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO29eZhcZ3Wv+66ae27Nas2yPMoSnoQHjM1gwGa0k5wEhwAOgTjhkIQkJ+Hi5CT3JgfncrgJUxLgGAjYYGJ8iC8mXDAemMGWLQ/YlmTZsuahpZbUc3fN3/1j769qV9Wu6qquXepu9XqfR09379pV9anUWnvt3/p9a4kxBkVRFGV+EZrpBSiKoiinHw3+iqIo8xAN/oqiKPMQDf6KoijzEA3+iqIo85DITC+gXhYvXmzWrVs308tQFEWZUzz55JMnjDFLyo/PmeC/bt06tm3bNtPLUBRFmVOIyH6/4yr7KIqizEMCCf4i0isi3xKRF0Rkp4hcJSILReQhEXnJ/brAc/5tIrJbRHaJyPVBrEFRFEWpn6Ay/88ADxhjzgcuAnYCHwUeMcacAzzi/oyIbARuBi4EbgA+JyLhgNahKIqi1EHTwV9EuoFrgS8DGGPSxpgh4EbgTve0O4Gb3O9vBO4xxqSMMXuB3cDlza5DURRFqZ8gMv+zgAHgKyLytIh8SUQ6gGXGmKMA7tel7vkrgYOe5x9yj1UgIreKyDYR2TYwMBDAUhVFURQIJvhHgEuBzxtjLgHGcSWeKojPMd/ucsaYO4wxW4wxW5YsqXAqKYqiKNMkiOB/CDhkjNnq/vwtnIvBMRHpA3C/Hvecv9rz/FXAkQDWoSiKotRJ08HfGNMPHBSR89xD1wE7gO8At7jHbgHud7//DnCziMRFZD1wDvB4s+tQFEWZaR54vp8jQ5MzvYy6CMrt88fA3SLyLHAx8A/Ax4E3ishLwBvdnzHGbAfuxblAPAB8yBiTC2gdiqIoM8LhoUn+8OtPcvdW3z1Vs45AdvgaY54Btvg8dF2V828Hbg/ivRVFUWYDDzzfD8DIZHaGV1IfusNXURQlAL7/3FEARpOZGV5JfWjwVxRFaZL+4STb9g8CMJbSzF9RFGVe8IPtjuSzrDvOSHJuBP8509VTURRltvK9545y7rJO1ixs58hQcqaXUxea+SuKojTBwGiKx/ed4s2b+uhKRBlNqeavKIoy55lM5/jMwy8xOJ72ffwH2/sxBt6yuY/OeISxOSL7aPBXFEWpwZP7B/nUwy/yoW88RSaXr3j8+88f5awlHZy7rJOuRITRZBZjfDvWzCo0+CuKotRgxLVu/vLlk9z+/+0seezkWIrH9pziLZv6EBE6ExGyeUMqW3mRmG1o8FcURamBlXHevGk5X/3lPr75xAGMMfxi9wlu/dqT5PKGN29eDkBXIgoULxizGXX7KIqi1GDU9e1/7KZNjKWy/PdvP883Hj/Irw4Osaw7zj/82mYuXNEDQFfcCaljySxLu2ZsyXWhwV9RFKUGNvPvaYvyz799Cb/++V8yMJLkf9y0id+8bBWJaHEQYVfCCamjc6Doq8FfURSlBqPJDO2xMJFwiN72GA98+FrCISEcqhxN0mkz/zmwy1eDv6IoSg3GUtlCUAeIRaqXSjsLmf/s1/y14KsoilKD0VS2ENSnotst+M4F2UeDv6IoSg1Gk9mCi2cq5pLso8FfURSlBmPJTMHFMxWdc6jgq8FfURSlBuWafy2i4RCJaEgzf0VRlLmOI/vU743pSkS14KsoijLXGUvWX/AFZ6OXyj6KoihzmHzeMJbO1q35A4XmbrMdDf6KosxpjDE8vOOYb8fNZhlPZzGGut0+4BR9VfNXFEVpMbuOjfKBu7bx0xcHAn9tG8Qbk31U81cURWk5wxNOoG1Ftm37+tTr9gE381fZR1EUpbWMp51A24oe+nYYe2Nun3mm+YtIWESeFpHvuj8vFJGHROQl9+sCz7m3ichuEdklItcHtQZFUeYf46kc0Jrgb+8mGgr+8Qhj6Sz5/Oye5hVk5v9hwDvm5qPAI8aYc4BH3J8RkY3AzcCFwA3A50QkjKIoyjSYsJl/Jhf4axdln8YKvsYU70hmK4EEfxFZBbwV+JLn8I3Ane73dwI3eY7fY4xJGWP2AruBy4NYh6Io849WZv62cNvoJi+Y/f19gsr8Pw18BPB++suMMUcB3K9L3eMrgYOe8w65xxRFURpmooWa/3TcPrY4PNt1/6aDv4i8DThujHmy3qf4HPMVx0TkVhHZJiLbBgaCt3EpijL3GStk/sHLPjaAd8QaK/h6nztbCSLzvxp4h4jsA+4BXi8iXweOiUgfgPv1uHv+IWC15/mrgCN+L2yMucMYs8UYs2XJkiUBLFVR
lDONoubfCtnHaermN7WrGjb4n/GyjzHmNmPMKmPMOpxC7g+NMe8GvgPc4p52C3C/+/13gJtFJC4i64FzgMebXYeiKPOT1rp9Mg15/KGo+c/2jV6tHOP4ceBeEXk/cAD4TQBjzHYRuRfYAWSBDxljgr9fUxRlXlDU/Fvg9mlgipelMNBllss+gQZ/Y8yPgR+7358Erqty3u3A7UG+t6Io85PxdCvdPo21c4b5pfkriqLMGBOp1mv+jdARiyDizP6dzWjwVxRlTmMLq+kWdPUcSzWe+YdCQmcsMus1fw3+iqLMaSas7NOiHb5dDezutcyF5m4a/BVFmdO0cpPXaDLTcMEX5kZzNw3+iqLMaVpl9czlDePpXMOaPziOnzPe568oijJT5PKGyUxrdvjaxmyNav4AnXNgiLsGf0VR5iwTns6ZQbt9RqfRy9/SlYio20dRFKVV2GJvSIKXfabTztnSFVfNX1EUpWWMu9n1gvZY4LLPWKrxds6WLnX7KIqitA6b+S/oiAWe+dsRjtNx+3TGo0xmcmRasPcgKDT4K4oyZ7GZ/8L2GOlsHmOCG51oM/euabh97N3C+CzW/TX4K4oyZ7GOnAUdji4fZPZfnN87vU1eMLv7+2jwVxSlKrm84WuP7iPdgg1UQWA9/gs7YkCwwd9aNacj+3Rr8FcUZS7z9IFB/ub+7fzy5RMzvRRfrNVzQbsN/sEVfceSWUSgPRpu+LnWITSbN3pp8FcUpSq2XfJkenaO3KjI/AP0+o+mnI6eoQameFmKbZ1n70YvDf6KolQl6e6enfRpmvbisVF+/65tLRmiUi+VmX+Qsk92WsVeKEpFmvkrijInqRX8t+49xUM7jtE/nDzdyyowns4RC4cKwTZo2Wc6ej8UHUIjqvkrijKbMMbU5UG3ck/SR05J1njsdDGeytIeDxOPOKEsaLfPdJw+UHQIzeaNXhr8FWUecvfWA1z1f/+QXL62L95m/EmfzL/WY6eL8VSOjliEeMQpygaq+ScbH95uSURDhEOimr+iKLOLB3cc48RYipHJ2sHJBni/gm+xm+bMZf4T6SztsTCxQuYf3IVodBrD2y0i4rR4UM1fUZTZQjaX58l9pwAYniL4F6Udn+Bf47HTxXg6R0c8UpB9gtyPMJbMFvz606Fzljd30+CvKPOM7UdGChbOkSlkickaBV+bZdcT/E+NpwNtvWCZSGXpiIdJRIPX/KczvN1LVyKqwV9RlNnD1r0nC99PlfnXCv42858q4N7/zGEu+9hDPLjjWKNLnZKxVJZ2r+YfUPDP5vJMZnLTaudscdo6+3++dz26jzt++vK0XzsINPgryjxj655TBY18StnHLaD6FVLrKfg+uL2fP7/3VxgDx0aCt4ROpHN0xLxun2AkKLt5bDrtnC21NP/vPXeU+546PO3XDgIN/ooyj8jlDY/vO8XVGxYBMDJZW5aomfm7F4RklWz75y+d4I++8TQX9HUBrdnwNJHO0h4P3u0z0kRfH0tnjSHumZzh1Hh62q8dBE0HfxFZLSI/EpGdIrJdRD7sHl8oIg+JyEvu1wWe59wmIrtFZJeIXN/sGhRFqY8X+kcYTWZ548blQP0FXz+3j30s5XNheO7QML9/1zbOWtLB199/BSGBiVTwhWHH6hkmHrDmX+jo2ZTmXz3zz+TyDE60pg5SL0Fk/lngvxljLgCuBD4kIhuBjwKPGGPOAR5xf8Z97GbgQuAG4HMi0njnJEVRGmbrHsfl89rzlhANS90F36SPnFLL6vntZw6TM4a73n85ve0xOmKRQvvloLDD2zviEWLhYGWfZto5WxKRcFVJLJ3Nk8mZGZ3z23TwN8YcNcY85X4/CuwEVgI3Ane6p90J3OR+fyNwjzEmZYzZC+wGLm92HYqiTM3WvSdZvbCNFb1t9LRF6y/41vD5+wW4iXSO7kSUpV0JADrikcAzf/v+HTGn+VosHAos82+mnbMlGglV3UWddo8PzqD0E6jmLyLrgEuArcAyY8xRcC4QwFL3tJXAQc/TDrnH/F7vVhHZJiLbBgYGglyqoswpTo2nmx4JmM8bHt97iivWO3p/dz3Bv4ajp5bPP5XJ0RYrhpf2eDjwzN9OyWqPO8JBPBIKTPMfLQxvn37wj4VDZHKGvM8uarsf4eSZEPxFpBP4D+BPjTEjtU71OeYrfBlj7jDGbDHGbFmyZEkQy1SUOcep8TTXfuJHfPUX+5p6nd0DYwxOZLhi/UIAuhPRKXf4Jmtk/skass9kJkciUlRzO2KRwEca2tfriDkBOh4NBS77NLPJyzqqMvnKzydTI/MfT2VPy+zfQIK/iERxAv/dxpj73MPHRKTPfbwPOO4ePwSs9jx9FXAkiHUoypnIPU8cYCyV5VeHhpp6na17HH+/zfx72qYO/rXdPrX7/rTFisG/PRYubCwLCju8vT1mM/9wcAXfJoa3W2wdwm/XcSbn5Lt+mf9/+cKjfPKhF6f9vvUShNtHgC8DO40xn/Q89B3gFvf7W4D7PcdvFpG4iKwHzgEeb3YdinImks3l+fqj+wHYfXysqdd6bO8p+noSrF7YBlCX5m99/uUB3hjjCf4+HT8zORKeCVid8Uih935Q2MzfSjPxSJCaf5ZwSGibxhQvSzTsiBw20HuxF4TyzD+fN7x0bPS0tMme/mWtyNXAe4DnROQZ99hfAR8H7hWR9wMHgN8EMMZsF5F7gR04TqEPGWNm55ggRQmYk2Mp0rk8fT1tdZ3/8M7jHBlOsmFJB3tOjJPLG8LTmCwF8NT+QV65biFOvgbdbZEp+817HT35vClMtUpl81iXop/UMpnJ09tWdMq0xyNMnGhR5u8G/1gk5Gs7nQ5j7hQv+1lNh5gre/ll/rbgW+71H5xIk82b0zIzOQi3z8+NMWKMeYUx5mL3z/eMMSeNMdcZY85xv57yPOd2Y8wGY8x5xpjvN7sGRZkr/P13d/CHX3uy7vPv/OU+Vva28f5Xn0U6m+fQ4MS03jeZyXF0OMnZSzsLx2zmX81rnnODkJVVvHZP751AtV7/tt8OQEcsHPgmr7GC5u/KPtHGZJ99J8b55hMHfB9rtq8PeDP/0jUZUwzu5cF/YCwFFC8OrUR3+CrKaaR/OMnhocm6zt3VP8qje07y7ivXct5yJ2i/PDA96eeI+56rFhTvOHraouTypqoWbwO8HZHoDfKTGf8Lgfdxr2TSHosUMvWgsDJSe4nsU/97fOnne/jofc/5unHG3YZxzRCrMmAm63m/wYnS4H98xA3+cyHzVxSllHu3HeTNn/mZ72MjySynxtNTDlEBp/lXPBLi5leuZsMSJ/hPV/c/NGiDf3vhWLe7gala0dcG+AUd0ZKfodT949feIVlW8O1wrZ5B7mi1/XcKmX+Dmv+OIyMY459lp7KlNYvpYPsNlWf+3p/LC77HRzX4K8qc5fnDw+w8OuKbEY8mM+QNDE3U9ncPT2a476nDvOOiFSzoiNHbHmNxZ6zp4G+LveBk/va9/LAB3mb+3oBvLwQi/u0dJjO5Qr8dcDZ5GRPsyMdC5h/zZP51vn4ub9h5dBTwt6qmsvl
C8J4u0SpuH+/P5QXf46NOoXfOWD0VZT5Szb1ig6lf6wSbZU+1uecH2/uZzOR471XrCsc2LOlsIvhPEA1LYcctOJu8vOstp1L2qZR6uhNR3+BZkfm73we50csOb7fyimP1rE/22Xti3FPM9ptVkC+5eE0Hu67yOwv7cywcqsz8R1TzV5RZzU9eHOCSv3+IE26Bzksh+Jd1zMznTaFI6fc8L9bqd77bERPg7KWdvDwwPi3p5NDgJCt620qcQjbzn0r2WdhRGfwn005wWtAerbjDyeacvjXlmj8Q6EYvO7zdEo+E6g6aO44W96H63S2ksrnAMv9Mlcx/aXec0WTphq4BlX0UZXaz48gIqWy+UEj1Ui3zH09nsVL/ybHamf/gRJqueKQQQMAJ/sOTGU5M8Vw/Dg1OlBR7YWrZx0o0ve0+mr/7fU97rCLztzWAErePG6THA+zvY4e3W+LR+mWf7UeGC9/7Zv6ZfKFT6HQpFHwrNH/nl2B5t3MX5i36WtlHM39FmaX0DztBf2iiMnAWM//Sx7ye+ql6uQ9PZOhpL+0o2UzR9+DgJKt620uO2YJvVc2/IvOvdPv4Zf62NuCX+Qe50csOb7c0ssN3x5Fi5u9XhwhE9pki81/mBn/v74Jm/ooyyzniyjJDPoHTBv3yoOod6XdyCtlncCJd0Not1qO/u0G7ZzKTY2A0VZH5dyUiiFB1o5cN4r224OvV/D3F4GQmVyJF2YuB1y3T4doxg2zxMJ7OFWyeUL/V0xjDjiMjLOuOA9UKvs3LPtU0fyvz+AV/6/bRgq+izFKsJj9c5toxxnhkn9Kg6q0BnJgi8x+cyBTkFktfT4KOWJiXG8z87b6CVQtLg38oJHTFI1U1/6Qnu4diwAeP7NMWJW9KWxjY55VbPcEZuB4UE6lsoZAMRavnVDWR46MpTo6nuWS1M1+qesE3IM2/SsF3eY9z8bHBfyyVLeyF0MxfUWYpR23mXyb7TGZyhUBYHlQbyfyHJzOFjNsiImxY2tnwRi8/j7+lu0Zzt8lyt0+2Mvjbx1I+j5V39YRgRzmOp3OFOwpwdvga499Lx4vV+y9d2wtUyfwzeeJN+vwLmf8Uso+1ex53Zxwv7IhN+XcIgiB6+yjKnObUeJq9J8Y5PpLk2EiSs5Z0cu251VuIp7K5glunXPbxSj3lBV/784qeRF0F3wXtlVOkzl7SyaNud856sS0hymUfqN3czco+VvMv8fkXJCH3riCTx7pIrYZe3tUTCHSX77hP5g/Ov0+sRta+/bCj91+0yg3+ZZq/MSYgt4/jrEqXBXJ7J2Btt9buaSWfVQvaeMHdg9BKNPgr85qJdJZr/ucPS7ToJV1xnvjrN1R9jvViQ2XmXxL8ywu+ruyzbnFH4c7Bj3zekY68jdEsG5Z2ct/ThwuNx+rh0OBkhcffUjP4l2X35S0d4pFQoaib9HEC+Wv+ARd8yzR/cDL5rmpPwrF5rlvUzqJOq/mXWVXzhryh6eAfD/s3drM/t8fCdCcixczfE/yfPTSMMaapxnJTobKPMq/Z1T/KeDrHX7zpXL73J9fw+9es58RYqqbm6g3cw5OlGfzwhDf4lwY6K/usXdRR0+c/ksxgDBWyDxQdP43o/n4ef0t3Ilp1jm8yk0PEsWzGIqESV4zdxOU3ON3eFXitnvFIiHBIAh3laIe3F98jXLEWP7YfGWHjiu6Si4UX+3NQm7yqtXeIRUIs6oxzyv2dsbLPyt4297zWSj8a/JV5zQv9zu31jRevZOOKbjYs6cSYot/aj6OuzXNpV7xq5h+LhHxknyzxSIgVPQlGk9mqFxj7muUFXyg6fhrR/Q8NTrDaR++H2pl/0m3OJuL0tS/P7tui4UJ2733MZtJeq6eIuANdgsn87fD29jKfP/i3m7CMJDMcODXBhSt6fC9c3ucnmvT5F2Sf8szfDerRcIgF7VFOjTuJwMBYilgkxJKuuHtea4u+GvyVec0LR0fojEcK2dayHkcaOTZSK/g7j13Q111V81+9oK0iqI5MZuhuixbkhmpef7vpp9zqCbB2UTuRkDTk9T80OOmr9wP0tNeWfWwAT0RDZb198iXBv6Tgm650+0CwoxwLw9vjfpp/9aC50/X3b+zrLq697GIRVOYfCYcISXXZJxYOsbAjzqlx5/MfGEmxpDNedX9A0GjwV2YFf/7NZ07L6LpydvaPct7yrsKQErvrsn+4uizTP5ykKx5h5YK2qpn/6oXtPm6fLN2JCIs6naBeTfqplflHwyHWLmqvO/hX8/hbuhMRkpm8/0CWdL4QINui4VK3T9rpelkIuD4bwBJlwdMZ4h6M7FOY31ui+U8t+9i2DnXJPk1m/uD8e9WSfRZ2FDP/46MplnTFiVbZHxA0GvyVGeepA4Pc9/RhvvjTPVX151ZgjOGFoyOcv7xYHiwE/5qZ/yTLexL0tkUZnkyX+MpHJjOIOLpthc8/maErEWWxG/yrNXcbcusIfpo/ONJPvRu9atk8wdvfpzIjd0YxOiEiEQ2XtnF2Nf+C7ONj9fTL/IPy+ZcPbwdP5l9D9tl+ZITFnTGWdhUz7Mrgnyt5vWaI+bSZtpl/NCws7IgzOO4M1Dk+mixZV6u9/hr8lRnnCz9+2ZEVMjm+/fTh0/a+R4eTjCSznN/XXTjW2x4lFgnVlH36h5P09bbR2x4lkzMl9sXhyQxd8Qi97Y5/vuTCkMw6sk+HI/tU8/oPujKAn9UT4Lzl3ew7MV4SjKtRy+YJtTt7eoewJ6Lhkr79Rc3fCSGlxWArm5SGl44AM//y4e1AVQ3fy44jI1zQ142IICK+u4JTmWBkH3Cknaky/3Quz3g6x/HRFEu741V3BgeNBn8lUPYMjPG1R/fVff5Lx0Z5cMcxbr12A5tX9nD3YwcCHfhRixf6HQngAk/mLyIs707UHKB9ZDhJX3eC3jYnM/fq/sOTTk+e7kSUbL70wjA6maErEWGhzfyreP2H3LuHroR/8N+0opu8Ke1MWY2pMn8b/P3uuCbTRc2/LRou3eFbkH0qC77eQrGXIDX/6cg+46ksLx4bZfPKHs9zKpvBFTX/YDL/yoKvzfxDhbpO//AkQxMZlnYlNPNX5iZf/vle/ub+7ezqr2+Tyhd+soe2aJjffdU6fueKNew6NsqT+wdbvEoHO8zj3OWlrvDl3Ymqsk86m+fEWIrlPYlC4zXvYJbhyQw9bdGinJIs3fTVnYjSFY8QC4c4MV5N80/T0xatOqh98yoneD1/eNj3cS9Fj3/c9/FanT0nM8VpVvbOzFKUffytnn5OmfZ4cKMcfTN/zyYvP7btHySbN1x51qLic3zm/hZknxZp/t6Cr63/7Op3ZLylXfGqO4ODRoO/EihPHxgC4L6nD0157uGhSe5/5jA3X76ahR0x3n7RCjrjEb6x1X+odtDsPDrCqgVthe6WlmU9iaqyz/HRJMY4fXbsJiyvt98G/24fLd2RfSKICIs6Y1Uz/8EJ/w1eluXdCRZ1xOoM/hOs7G0rFLTLqdXTP+lx+7TF/KyeoUJR1y/zL6cjFg4u809XZv4xn+Kzl8f2nCQSEi5bu6
BwrOWyj8+MgUwuTyQkhEJSyPx3uXehS7vjVXsCBY0GfyUwJtJZdh1zsun7nz4y5ZzaL/50DwAfuOYswPmP/GuXrOS7zx2tGG/XCl7oH+UCj95vWd4dp3846Ss/WTnI0fyryD5t0eJ8XDfzT2ZypLP5wvFFnbGqVs+hiXTVYi840tSmlT08V2fmX03ygdpzfJNlmn95P/+2aHGTV3m750TMJ/gHmPlPR/Z5bM9JXrGqp+w5lQXZIGWfaDhEOlv6e5TO5gsB3tZ/7H6TpV0JzfyVucdzh4bJ5Q2/fulK+keSPPpy9R40g+Np7nniADdevLLgsQd41xVrSGfz/MdTU985NEMyk2PPwFiJ3m9Z1p0glc37SiG2lXNfT6JgxRwqyfyzbubvBBh7VzDqOn+6E87xRR3xqgXfoYlM1WKvZdPKbl46PuY7J9hLLY8/TC37FH3+ZZl/2gnwiUilzz+ZyVXYPMHN/AMa4l4+vB1qyz7jqSzPHRoukXyc54R9NP9c4bFm8c/8TSHAL+hwPn+bNJXIPpr5K83ys5cGCsXNINh3YpxPPfRixX+yZw46ks9fvOk8uuKRmtLPT14cIJnJc8ur1pYcv6Cvm0vX9PKNra0t/O4+PkbeUOL0sSzvqW73tENclvckCoHTWjONMYWNXOWav/1q5aBFnbGqE7mGJmtn/gCbV/aQy5tCxuhHMuM0oKsV/GNufx7f4J/Olfr83SCZzxtSWWeTVygkxMLlrR/yFTZPcDR/Y0p7BE2X8uHtUNTo/TLmJ330fvucCtknQJ9/LCwVm7VSnsy/Mx4hGhb2n5xAxGmid8YXfEXkBhHZJSK7ReSjM7WO2UY2l2f38dEpJRMvR4Ym+Yfv7SzIKF6OjyZ5/53beNcXt9a0L9bLocEJ3vXFx/jMIy/xw53HSx57+sAQaxe1s6K3jbds7uOB5/urTm7auvck3YkIF67oqXjs5svXsOfEOL86NLWsMV12uk6Z830y/+JGr8rP6+hwks54hO5ElIRrdbTZfTKTJ53Ll8o+ZVO9utzMf3FnnJPjKd8L3NB4ZS//cja5jpVa0o91+qxeWF32Aehui1Tx+Zdu8pp0h7Yky9o3xKMh39YP5RSGuAfQ32c8nSMalpLundV8++Cv94O/7GP/LoG5fXw0/5jb+kFECl1TF3XEiXgG0p+Rmb+IhIF/Bd4MbAR+W0Q2zsRagsQYw6nxNPtOjPPcoWEefflkSTFwKk6MpfidL23lDZ/8KVf8wyP87f3P8/jeU+SrXAh2Hx/jL/73r7j2Ez/ijp/u4R++v5MXj5Vmgl/5xT4yuTyT6Rx/es8zVS8q2Vyebz99mC/9bA/ZKr90x0eS/M6XtjKWytKViPCD7f0ljz9zcIiLVzttcn/t0pVMpHM8uP2Y72tt3XuKV65b6OtoedPGZYRDwkM7+n2eGQwv9I+SiIZYu6ij4jHbZ93vYtk/nCzcGQD0tsUKso/NnnvaooUgP+wG1aLs42b+HTGSmXyFBp7J5RlNZQs20mqsdPcZbK8R/A+emiicWwu//j7ZnHMh87Z3yOUNmZypaN/gjE8st4H6uH0CHOU4kcqWZP3gtFOIhMRX9vHT+4trb01jN6ju83kgAiYAACAASURBVPdetGzR1zqyYqep4DtTLZ0vB3YbY/YAiMg9wI3AjqDf6IHnjzI0kSFvwGAwBsIhIRwSIiEhHgnT7u5U7IxHCja9rkSkqkMCnFvfoyNJ9p0Y59lDwzy5f5BnDg5W3MpHw8Krz17MWzb38aYLlxfkgHKePTTEH3ztSU6Np/nzN57LzqMjfPOJg9z16H7eftEKPv3Oi0sC5Q9fOMatdz1JJCy8+8q1/JfLVnHzHY/xyQdf5AvvuQxwpIavP7qft2zq4zXnLeEj33qWz/94N3/0+nMKr5PN5bn/mSP8y492s/fEOACP7DzOZ3/7kkKDKXD60Lz7y1sZGE3x9Q9cwTe2HuAH2/tJZ51f5KPDk/SPJLnEDf6Xr1vIyt427nv6MDddsrLk7zowmmLPwDjv3LLa97PobY9x+bqFPLj9GH95/flV/w2a4YX+Ec5b1uV78VlWo8XDkeEkfd7g3x4tyD7e4B8Jh+iMRypkH+vdt9neybF0SUCyr2G14GqICJunKPo+tvck0bD4Slte/Dp72g1dbbHiDl/neK6ifUOibHB6MluUi7wU2joHkPmPlXX0tPj59sdTWZ49NMyt155V5Xx/t0+tmQD14hR8K62eVvYBCnbPpe5YyehpKvjOVPBfCRz0/HwIuKIVb/SPD744rYHXItAVj9Dt3sLHIiHyxpA3hmQmz8FTEyUZw/rFHbzm3KVcuKKb3vYoXe5zfrn7BN999ig/2vUsf3P/8/zGpat439XrOHtpF8YYXjw2xsM7j/GZR15iSWec//jgqwq39GOpLF/62R4+/fBLdCUi3H7TJkSEJ/ef4r/e/RQX9HXzlfe9ksVuo7APXLOeTz/8Er86OMRFqx3dfDSV5Q9fs4FNK7v5xe4TfOrhl9iybiEAD+04xgPP93N4aJIL+rr5wrsvZSSZ5W++/Txv++ef8al3Xkwyk+OB5/t5aMcxJtI5vvK+V3LpmgWcHEvzrScP8diek1x77hKecS2eF69xbqtDIeGmS1bw+R+/zPGRJEu7iwHziX2nALh8/cKqn/+bLlzG3/3nDvYMjHGW28Y4KIwx7Dw6yhsvWOb7eCwSYlFHrKrmf96y4qCXnraob+YPTnG3KPu4mX9bUfYBODGeYs2ioixj9wxMpfkDXLiihy//fI87eKQyEP5k1wBb1i6csu9/T1u04u9aPoTdZvnJdK44o7dk92/pfF9f2ceOcgwg8x91W2WU4+fbr6b32/PLg2wq60hK1fZZNILfJq96M//yITBBM1PB3+9TrfibisitwK0Aa9asmdYbfeMDV5AzBkGw/5Y5Y8jlDdmcU7iazOSYSGcZS2YZnswU/oxMZhh1j2XyhpBAWIRoOMTrzlvCusUdrF/Uwfl93YVMrpzXnLuEj775fJ45OMQ9jx/kfz95iLu3HuCSNb0cPDVRuFN49dmL+czNFxc6PoJTDPrTN5xLKpvn8z9+mUUdMd72ihW87ytP0NfTVhL4Ad7/6vXc+ct9/OODu/jie7fw5Z/v5dVnLy5sCvrYTZt45uAQN9/xGOD8Yr5qwyL+9u0beeMFywp3OptW9PDBu5/kXV/cCjgXwddfsJT3XrWuoJlec85i2mNhfrC9n2vPXcLTB4eIRUJs9GSZv3bJKv71Ry/z7WcOc+u1GwrHH997ivZYuHCR8+ONG53g/9COY/zBa4IN/gOjKU6Npzm/r/rIj+U+Xv9MLs/x0RTLe4oySm97lH0nHHmlIvi3FTNq28vfa/WEyl2+haZuNXz+ls0re8jkDC/2jxX+jS39w0le6B/ltjdPfefU0xYtuE0s5UPYbZY/mckxmXbvCqzmX9br39sWwkt7gKMcx1JZOhOV4cvPt19N7y+eXyn7BCH5gBPIy7X7VFnmb2OHHbZzugq+MxX8DwHee/5VwJHyk4wxdwB3AGzZsmVal0FvxjlTi
AiXrFnAJWsW8JEbzuPfHz/AA9v7ueacJVy1YRFXnbWoZlHuI9efx+B4mn/+4W6++ot9tMXC3PV7l5cEfnAkhf/62rO5/Xs7+av7nmNgNMWn33lxyeP/6z2X8fXH9nP1hsVce+6SCg0UnI6H//nHr+beJw6yYWknr9qwqOI/QyIa5jXnLuGhHcf4Hzdu4ukDg1y4orskozl7aSeXrV3APY8f5PevOauw3X/r3lNctnZByX+AclYtaOfCFd08uOMYf/CaDVXPmw47XYfM+curyyHLuxMV07YGRlOFDV6W3rYYQ5POXU9l5l/U0keSGcIhKexILbZ1LpWWBidsX5+pM//NnqJvefD/yYtOMf4151UfR2nxm+NbPoS9kPln8sXGbR4baPkMX3/ZJ7hRjmOprG/C5RfMq+n9xfPL3T7Nj3C0VM38/YK/K/uc6T7/J4BzRGS9iMSAm4HvzNBaTiuLOuP80evP4bt/fA2feufF/NaW1VO6MUSE239tM2/d3Ec4LNz1/surPuc9V61lWXec+54+zOaVPbxqQ+mt7vnLu/nYTZt58+Y+3/8Mlu5ElA9ccxavO29p1Szo+guXc3w0xbb9gzx3eJhLVldmVr/tOne27nWknuGJDC/0j3D5uuqSj+VNG5fz1IFBBkZrDztvFLsz1s/pY/Hb5WuHuFRo/tVkn7ZoQe4ZTTpFcnsBXNRh2zqXZ/7pwutOxeqFbXQnIjx/pFL3//GuAZZ3JzhvWa2BhhTWOZrKlhgLygO8/TqZyXm6dtp6QDHzN64s6hv8Y1bzDyDzT/qPsSz37Vu930/y8Tsf3OHtAQV//5bOpiRJKmb+pcH/jNzha4zJAn8E/ADYCdxrjNk+E2uZK4RDwr+86xIeu+26mhlrIhrmT65zCroffO2Gls4Afd35S4mEhM8+8hLJTJ6L1/RWnPPWzX10JSL8++NOy4Yn9p3CmNp6v+VNFy7DGHhkp79jaLr8eNdxNvZ1s6CKVAdO5n9yPF2SFR4tbPAqyj497VFS2TzJTK4Q/K0W3d3mKfi6Td0s1mBQVfapI/jbnb7lbR4yuTw/f+kErz1vSV3//j1tUYwpOpKgUvO3nvfJdM4zprHo9rF3Cjbr9tf8rdun+cx/xL2YllPu23/qQHW9v3i+j+zjs/7p4NvYLZsvTPmC4i7fJa7sEw6J7xCYoJkxn78x5nvGmHONMRuMMbfP1DrmEiLim1GV867L13D/h67mzZuWt3Q9PW1RrtqwiJ/vPgFQcPp4aYuF+fVLVvL95/oZHE/z+L5TxCIhLvI5t5zzl3exemEbD+4ILvgPjqd5cv8gb7hgac3zrNffO6zd+v7LrZ7gBG0b4G2hsDtRlFNGktmKHkKLOmOcrJB90kRCUvdw9k0re3jh6GhJoHj6wBCjqSyvOXdqycdZZ8RdY1H6sdl9vCzzT2aLBV+vDdQGUL/5vRYreQUxynEsVaXgWyb77HMdbNXqO3HXh++96wlS9nEy/1LFurzg+/rzl/I3b9tYsEmDc9E4IzN/pbWICBet7m1p1m+5/kLnArO4M1Z1J+nNl68hnctz39OH2br3FBev7q3rIiYivGnjcn6++0QgRUJwdhbnDby+itPHssxnl++RoSTtsXAhWEIxQx+aTBf6+li8csqo29HTy8KOyuZuQ5POBq96/+02rewhncuX7O/48a7jRELC1ecsrus1/Fo8lAd4r9unfFhLwpP5l28A82KHuDcr+2RyeZKZfHXZxxP8+0eShEPC4g7/rqZW0vQWZZ2Cb4Cafy5fspmv3OrZFgvz/levL3EXRcOVdyRBo8FfaYo3bXSC6MWrF1QNWBf0dXPx6l7uenQfzx8e5oo6JB/v66ezeX6yayCI5fLIC8dZ3BnjFTWcRuC/y7d/ZJK+nkTJ39O6coYmMhXBvyCnpLKMTFbKFIs64hWjHKdq6lbOlrULiISEv//PHYWs+8e7Brh07YKKi001/IK/X4C3xysloWLrh2rze8EzxL1Jn7+9ePgH/1LZp384xdKueNU9O35jKB3NPyi3j/O+3uw/XZb5V1vXGbnDVzlzWNqd4L+/9QLfDTRe3nX5GvafnCCXN3Xp/ZYt6xbS15PgCz95uaGWF35kcnl+sus4rztvac0NfFAM/rboa4xhV/8oK8p2y/a0Vw/+BTllMuNk/mX2zcWdsYpRjkNTtHMuZ0VvG59658U8sf8Ut35tGwdPTbDj6Ejdkg94Brp4g3+ZndPP7ZPwWD3tRqmCXFQleHbEIk37/G1twtfqWbbh7NhIsrBpz4/i9K/iBSOVzQXS1wfwbdVQ7vbxfV44pAPcldnPB645a8qA/raL+uiMO3r4pWsqXUHVCIeE295yAc8dHuabTxyc+gk1eHL/ICPJLNdNofeDU6xNREOFzP+JfYO8PDDOWzf3lZxns/ThKrKP81imquZ/ajxdojcPTmQayvwB3n7RCv7nb7yCn710gnf+r0cBeG0dFk9LPbJPwuP2SWVyiBSz5oRnY5W9A/DL/CGYUY7lHVK9+Mk+y2sFf5820IHKPrZVg+f1y2UfP6Ka+StnCu2xCH/4mrP49UtW1rSY+vH2V/RxxfqFfOIHLzTV5/+HLxx32m2cM3VgLIxzdDP/O3+5j562KDdeXNqqopbsY4P90ESm0A/Jy6KOOLm8KSm0OrJP/Zm/5be2rObv3nEhR4adIeAbp2jp4MXuKfDehRSye4+dE5yLgm3cZuWvRNQJVLm8qbholNMRb36U41hB9qlW8C1eXI6NlPZi8jsfyjP/4GSfqG/mb6aUfWI+bSGCZqY2eSnzEG9PoUYQEf7uxgt562d/zj89tIuP3bR5Wq/zyM5jXHnWorqdNMu6Ha9//3CSB7b38/5Xr6/IaNtjYaJhYWjSX/MHp+sqUCH72F2+J8aKOn89vfyrccur1tHT5nQbbaTY3xYL09seLalvJDM5QlLMXGPhECEpDf6WuKenf3k9oJz2WJiJJjX/sZRzsfSTfWIet89EOstoMltb9olUDqMJdJOXz27dujJ/n/0BQaOZvzInOH95N++5ci13bz1Q1/jCcvadGOflgXFef/7Uko9leY+T+X9j637yxvDuK9ZWnCMi9LTFODacdCZ1lcg+TnA6NOi0fyjP/G1QOnDKsSPawNqo7OPlpktWcsM0LL7OjubJws92eLu9iIiI09Y57bR38Lq1CnN8M/mC28fP6gnuEPegNP9qBV83kBetuf5OHyhaWUtkn0y+ZZq/Maaugm8som4fRSnwZ288l4XtMf7yW88W+vHXyw9fcNodNBT8uxMcG07xjccPcN35S0sasHnpbY+y322f7Kf527765Zr/xat7aYuG+dELjpPJau7TkX2aZUVvW0k7C78WDbaBW7Ksd09Jx8+yDWDlBDHKcWrN35k7YCW7ZV0zJ/uUZ/7W9RML174zU5+/onjoaYvy8d94BYcGJ3jLZ3/Gh+95mv0nx6d8Xiqb47vPHuHspZ2+/fursaw7QTqX58RYmvdeta7qeb1tUfafrAz+nbEIIp7g31YarBLRMNeeu5iHdx7DGMOg29qhnr4+
QbO8J1FX8J9M531kn6J0Ut4TqJyOePND3Auaf5XGbnkD2bwpOLWW1aX5t0b2KR/Gbr9OJfuo5q8oZbxx4zJ+9pHX8YWf7OGrv9zLd589ypa1C7j67MVcffYiXrGqt+Q/1o4jI/zZN59h17FR/q+3NzYvyBYKz1rcwavPrr5hqrc9yon9jl/fG/xDIaE7ES3IPn6+++suWMYPth9j+5GRQkbbiNUzKFb0JDg1nnbm77rzessDeFvMOT5Z1rK5kPl7+v5Uy/zbY80XfEfdJnm+G8k8oxztPIa63D6e3kRBb/Ky64Fi8K9H9hma1OCvKCX0tsf46JvP5/euXsdXf7mPn740wKcefpFPPuQUGi9e3cuWdQsQ4PM/eZne9hhf+d1X8roGJB+gsGP5PVetrbkvoMczdat8WE93W4TDVWQfcGQoEWeAznnLOwt/v9ONbVPdP5xk3eKOigAPtoGbE+C9tY2C5p/NFwqniSrBrSMWZiKTI583U+61qIZt6uZX1PZaN4+NJOmKR2q6y8p9/pmcM/ApqN4+0XCp5m8vAlMXfIVM9szs568oTbO0O8FHbjifj9xwPoPjaR7dc5LH955i2/5T/OuPdpM38JbNy/nYTZurzluoxeaVPXzlfa/kmhpZP5Rq9BXBPxHloHGCv18jssWdcS5ds4CHdx4rtPSdEc3fvcs56gb/ZCZfEfztHN9kJsey7mIR1QZce2GIhUNEqgQ3O8Q9mc1VjGGsl9GUf0dPZy3FYH5sJFlT8ik9P194nvd4s5Rn/um6M/9wy33+GvyVM4IFHTHesrmPt7ibsMZTWfpHkpy1uGPaPY5EhNedN/XdglemKQ/c3ouBX/AHuO6CpXzigV3scucMzJTmD8W21eXZPThSzlgqW6H5l+wBSNfeHesd5Tjd4D9WpaMneDL5TJ7+kWTJRcr3/LJNXsX5vQFv8nILvfYiUM8O3zO2q6eitJKOeIQNSzpPS3M7b8Av7zRppZ6OWLhqNmzHSX7nV0eIRUJVbZKtxLaptkXfZCZHW9k6Em4Pn8l0aT3AG0BTWf8RjhY7d7eZFg+jVXr5l6/l2HDt1g7gvViUtqQOzO1Tofk7F4EpC74R0R2+ijLb6XEzdW87Z4t1+Pi1H7Y4LqR2To2nWdBAR88gaYuFWdAeLcn8/WQfK+0kqhV80/4jHC1BjHL02y1tsRn7ZCbnjNycKviXyz6FVtbByj7lbp/ZsMNXg7+iNImVfcr1fihm/uU2Ty8iwnXnO9n/TEg+luU9bYWNUZPpSqun3eSVrGL1TLlN3xI1suYgRjk683v9L6Y2Yz8yNEk2b2q2doCi/NIq2ccObUmXvX5Uff6KMvexso9f8PfO863FGzYurfoap4sVPQmODLnB39fnH2IslSWTM75Wz1Q254xwrCPzb8buWVP2cTN2u+9iKtlHREr6AbVM9mkw849q5q8osx87zcs383ePVZMpLK9ct5DuRKTQ72cmsO0sAF+ffyIWLsg1fu0dbLvn8lqBl84ARjmOJjNTyj62ZcZUso99jvX5F2SfFvX2qbvgGwmRzZuSjq9Bo24fRWmSnhqZv5V7yp0z5UTDIb743i3TsqQGxYreNk6Npxn3ye6htFlbwqfgm3RtoItq/B3sKMfpav7pbJ5UNk/XFAXfA6fqy/zB8fRXZP4zrPl79wckQsHchZSjwV9RmqQrHiEktTX/qTJ/gCuqDBk/Xdgsea8797Zyk1fxZ+9j0bAzcNz2/ak1orMwxH2awX+8RmsHKGbs+09OEBJnYM5UlGT+Acs+0SqZ/1Run7hHLqpn5Ol0UNlHUZokFBJu2LScqzZUBu96Nf/ZQF+vE/z3uMG/XLtvqxL8RcQZ6JKp7PtTTnGI+/Rkn7EaIxyhmLEfGZpkSVe8qr225DmeDppBb/KKhAQRzw7fenv72DuGFur+mvkrSgB87ncu8z1u5Z6pZJ/ZgPX67x3wz/xLgn+sNHjFIyG3q2ftgq8d4j5dn78dfFPNOmsz9rypT++3zynIPplgM38RcYq3uVKf/1QXl/K2EK1AM39FaSGLO+NEw0LfFJbD2YBd454TY0Bl8Pfq4L7tnt2unrWsniJCRxND3Mfc5ndTFXyhPr0f3Lm/5VbPADfaxT3OnXplH78hMEGjmb+itJCFHTEe+rPXFJrEzWYSUWejl9X8y3caV5N97HOdxm65iruCcpoZ5TiV7BObTvAv0fyDlX3AGeXYcMG3rFDcCjTzV5QWs25xR13a82ygr6eNPdVkn1jY93twguVYMkM2X+kSKqc9Fp621bNWL39wNHa7yXqqDV6WEtkn4IIvlO7WTde7yats81kraOo3UkT+HxF5QUSeFZH/V0R6PY/dJiK7RWSXiFzvOX6ZiDznPvZZmYm97Iqi+NLXkyh6+ct9/jUy/3g0zJA7iWwqd0pHfPqjHEemkH2cTVvO+zeU+btB1g6jmSozb4RoRIqN3eos+MYLmX/rfP7N/g0fAjYZY14BvAjcBiAiG4GbgQuBG4DPiYj9jfg8cCtwjvvnhibXoChKQFjHD0xR8C2XfSIhhibqC/7NDHEvaP7x6gV0q9fXXfB1JStwMu1oWCp6NDWDN/MvyD51DHCH1mr+TQV/Y8yDxhh7CX8MWOV+fyNwjzEmZYzZC+wGLheRPqDbGPOoMcYAdwE3NbMGRVGCwzp+oLbP3++uYMgdQzmV7NPTFi2MrGyUsZQzxatW51ObNdca3F5+fqGrZya4+b0Wb2/+dDbvSFNTXFzKu4G2giCFyN8Dvu9+vxI46HnskHtspft9+XFfRORWEdkmItsGBgYCXKqiKH54XUkV7R08AbdC9omECgPoa3X1dN6jjSNDkzj5X2PUmuJVXMv0ZZ9UNhd4S+1YWEoy/3okJVsTmNGCr4g8LCLP+/y50XPOXwNZ4G57yOelTI3jvhhj7jDGbDHGbFmyZMlUS1UUpUm8RVK/rp7gFFXLNetENIxtQzNV8FzZ28Z4OlfQ7xthtMYgF0s8EqI9Fq7qCConUSb7BJ/5F90+6Wx+Sr3fPseup1VM+ekYY95Q63ERuQV4G3CdKV7KDwGrPaetAo64x1f5HFcUZRawoobsYzN6P1knUWMPQMV79DrvcWRosuEuprVGOFri0RDLuxN1z0Uo7+oZpM0TSjt0pnOmruAfn+1WTxG5Afg/gHcYYyY8D30HuFlE4iKyHqew+7gx5igwKiJXui6f9wL3N7MGRVGCw2b+Iam0I9rNW347eL3Z8lSa/8oFxeDfKLVGOFoWdcRZv7ij7teMR8JkcoZc3pDK5AJ1+oCTxXtbOtdzcTkdBd9mN3n9CxAHHnKvso8ZY/7QGLNdRO4FduDIQR8yxtjy/geBrwJtODWC71e8qqIoM0IiGmZhR4xUJleROYdCQiwSCiDzdy4wh6cR/EdTGZZ21dby/+m3LvLVl6th3UG2Y2g84EZq0TKf/1Qef6icA9AKmgr+xpizazx2O3C7z/FtwKZm3ldRlNbR15PgmNvXv5y2aLhK8K8/81/cEScWDk0r+I8ls5y1uHbYWtxZn8vHUhzlmCOVzQUu+5R
n/vXcWRQHv89S2UdRlDOPvp5E1ew9EQ35yj4lwX8Kt08oJPT1FqeGNYIzwjHYrjSlA+iD1/xj4cYLvtHTYPXU3j6KopTwe69eXzUwO5l/ZfDyBsxajd0sK1y7Z6OMJrNVB7lMF+8M4lQmz6KOgN0+JQXfxjL/WSv7KIpy5vGqDYurPtYWi/jKOvGSDWBTB7cVvW388uUTDa2rMMUr6Mw/Wib7BOzzL2nvUK/Vcw4UfBVFmUf85fXn0unTWiHhZrMhmbp1AcDKXqeukMnVFwxh6o6e06X1sk+4ZJOXHWJfi1BIiIREg7+iKLOD15+/zPe4zfzbouG6/PUrF7SRN3BsJMmqBe11vbft69MZ8FS00oJvazZ5eYe51Gsl9W4OawVa8FUUpWls5l/vvNniRq/6i76jKad9RPCZv1fzb4Hbx23vYIyp2+oJpRbRVqDBX1GUprFBv/HgX3/Rd9TN/LsD1/zLZJ+ge/u4F5Ns3rhWz/o+I+8dQyvQ4K8oStPYbHkqm6fFtpFoxOtflH1ak/knM62Rfby7dVMNZP6OS2j29vNXFEXxZP71hZS2mLOTuKHg37KCr7PmUff1W7HJC5xibyaXr6sgbp+nmb+iKLOahKfgWy8rehONyT4pO8Ur4IKvu+YRtyV1Kxq7gZP517vDF9zNYar5K4oym7EZf72aPzS+0Ws06QTnwH3+bjC2LaaD7u3jbc9cr88fnP0BmvkrijKrsTp5Q8G/t43Dg/UPdRlLZomEJPDMvBD8W5T5e/v0NGT1VLePoiizHZv5NyL7rFrQ2FAX29en3j799WIvXCPJFgV/b+bfwKa2aFg1f0VRZjnT0/wbs3vW08t/OkTDggiMTNqCb2vcPhNpp6t9rF63T0Qzf0VRZjlW2qjX6gmNB/+RZNa3tUSziDhSUiHzb5HPf9wtWNcr+8R1h6+iKLOdUEhY2hVnaXf9vfTtUJe6M/9UJvCOnpZ4JNxCt4+T6dvg35Dso719FEWZ7Xzvw9c0JMvYoS6H6g7+2SmneE2XeCRU2EEctOxjLyZjDWb+6vNXFGVOsLgz3lDgbHSoy2iLNH9wpJ7WuX2cz6TRzF99/oqinLE04vUfS2YD391riUfChU1k9e5SrpdoxJV9CgXfen3+mvkrinKGsnJB/cF/tAUjHC3ebD/wls7haco+4RApzfwVRTkTWdHbVhjqUosjQ5Oks3l622ItWUdp8G9Ne4eGZR91+yiKcqaysjdB3sChwdrZ/z//8CWiYeHtF/W1ZB3ebL9VBd/xlCv76A5fRVHmO5tX9iICN/3rL/jcj3cXsmMvewbGuHfbIX7nirV1T/1qFK+3P/AZvhWZf/2bvPIGcvnWtHXW4K8oyoyxcUU3//lHr+aytQv4xAO7uPYTP+LbTx8uOedTD79ELBziQ687u2Xr8Eo99RZk6yVWZvWsV1aKtniIeyB/SxH5CxExIrLYc+w2EdktIrtE5HrP8ctE5Dn3sc9K0I06FEWZU2xa2cO//e4r+Y8Pvor1izv4028+w2cfeQljDDuOjPCfvzrC7716HUu66t9A1ihW6omFQ4RCwYakaFnBtxHNH1oX/JsunYvIauCNwAHPsY3AzcCFwArgYRE51xiTAz4P3Ao8BnwPuAH4frPrUBRlbnPZ2gV84/ev5KP3PcsnH3qRI0OTHBtJ0p2IcOs1G1r63jYbD7rYC9Pf4Wt7ALXK7hmEb+pTwEeA+z3HbgTuMcakgL0ishu4XET2Ad3GmEcBROQu4CY0+CuKgpPt/tNvXsSKnjb+5Ue7AfjIDefR0x58Tx8vVucPWu8Hp3dQLBwqNnZrYIcvzNLgLyLvAA4bY35Vpt6sxMnsLYfcYxn3+/Lj1V7/Vpy7BNasWdPMUhVFmSOICH9xJAB/dQAAB/dJREFU/XmsWtDGD7b387uvWtfy97SyT9BOH0ssEir6/BuUfVq1y3fK4C8iDwPLfR76a+CvgDf5Pc3nmKlx3BdjzB3AHQBbtmxp3SRjRVFmHTdfvoabLz89SV8rZR9wAvmw2z6i3sy/UPCdqczfGPMGv+MishlYD9isfxXwlIhcjpPRr/acvgo44h5f5XNcURRlxigUfFsU/KNhKVg2G+ntA7PQ7WOMec4Ys9QYs84Ysw4nsF9qjOkHvgPcLCJxEVkPnAM8bow5CoyKyJWuy+e9lNYKFEVRTjtFzb91so+lXp9/dDZr/tUwxmwXkXuBHUAW+JDr9AH4IPBVoA2n0KvFXkVRZpRWyz7ebL/uYS4tzvwDC/5u9u/9+Xbgdp/ztgGbgnpfRVGUZikWfFuk+XuCfzTUYMG3RZm/7vBVFGXeU8z8Wyv7RMNS9yayObHDV1EUZS7TSp8/FDP/eou90Podvhr8FUWZ97Ra9ok2E/xV9lEURWkNp0v2acRKOmutnoqiKGcKp8vt00jHUM38FUVRWoz197dK849PI/O3F4xWtXfQ4K8oyrzndLp9Gn2OZv6KoigtovWyjxP0p6P5Z3I6yUtRFKUlFGSfFjZ2g8bcPvaCkVLZR1EUpTUsaI/S0xZl/eKOlrz+dKyedg7ArG/voCiKMldpj0V45m/f2LLXj01TVoqGpWXtHTT4K4qi4GTarWI6O3zBuWioz19RFGWOEpuGzx+c4K+N3RRFUeYotjd/tGHZRzN/RVGUOUtR9mlMWopFQqQ081cURZmbRKdZ8I2FQ7rDV1EUZa4Sb6bgq5m/oijK3CQacXf4Nhr8w1rwVRRFmbPEws4OYi34KoqizCOm097BPk+Dv6IoyhzFunwa3+EbIq2N3RRFUeYm02npDM7FIp3NtWJJGvwVRVFaTTM7fNXtoyiKMkeJTXuHr5DJquyjKIoyJ5nODF+Y5Zm/iPyxiOwSke0i8gnP8dtEZLf72PWe45eJyHPuY5+VVrbSUxRFmQX0tkcRgYUdsYaeFwuHW7bDt6mWziLyOuBG4BXGmJSILHWPbwRuBi4EVgAPi8i5xpgc8HngVuAx4HvADcD3m1mHoijKbKavp40H//RaNizpbOh50YjM2t4+HwQ+boxJARhjjrvHbwTuMcakjDF7gd3A5SLSB3QbYx41xhjgLuCmJtegKIoy6zlnWRehUINuH3eTlxMug6XZ4H8ucI2IbBWRn4jIK93jK4GDnvMOucdWut+XH/dFRG4VkW0ism1gYKDJpSqKoswtbK0gmw8++E8p+4jIw8Byn4f+2n3+AuBK4JXAvSJyFuB3eTM1jvtijLkDuANgy5YtrSl5K4qizFLOXd7FW1/RR74Fmf+Uwd8Y84Zqj4nIB4H7XAnncRHJA4txMvrVnlNXAUfc46t8jiuKoihlXH/hcq6/0C/3bp5mZZ9vA68HEJFzgRhwAvgOcLOIxEVkPXAO8Lgx5igwKiJXui6f9wL3N7kGRVEUpUGaHeD+b8C/icjzQBq4xb0L2C4i9wI7gCzwIdfpA06R+KtAG47LR50+iqIopxlpRRW5FWzZssVs27ZtppehKIoypxCRJ40xW8qP6w5fRVGUeYgGf0VRlHmIBn9FUZR5iAZ/RVGUeYgGf0VRlHnInHH7iMgAsH+m19
Eki3H2QShF9DMpRT+PUvTzqKTRz2StMWZJ+cE5E/zPBERkm5/laj6jn0kp+nmUop9HJUF9Jir7KIqizEM0+CuKosxDNPifXu6Y6QXMQvQzKUU/j1L086gkkM9ENX9FUZR5iGb+iqIo8xAN/oqiKPMQDf6nARFZLSI/EpGdIrJdRD4802uaDYhIWESeFpHvzvRaZhoR6RWRb4nIC+7vyVUzvaaZRkT+zP3/8ryI/LuIJGZ6TacTEfk3ETnutsy3xxaKyEMi8pL7dcF0X1+D/+khC/w3Y8wFOCMvPyQiG2d4TbOBDwM7Z3oRs4TPAA8YY84HLmKefy4ishL4E2CLMWYTEAZuntlVnXa+CtxQduyjwCPGmHOAR9yfp4UG/9OAMeaoMeYp9/tRnP/YVQfXzwdEZBXwVuBLM72WmUZEuoFrgS8DGGPSxpihmV3VrCACtIlIBGhnno18Ncb8FDhVdvhG4E73+zuBm6b7+hr8TzMisg64BNg6syuZcT4NfATIz/RCZgFnAQPAV1wZ7Esi0jHTi5pJjDGHgX8EDgBHgWFjzIMzu6pZwTJ3HC7u16XTfSEN/qcREekE/gP4U2PMyEyvZ6YQkbcBx40xT870WmYJEeBS4PPGmEuAcZq4nT8TcLXsG4H1wAqgQ0TePbOrOrPQ4H+aEJEoTuC/2xhz30yvZ4a5GniHiOwD7gFeLyJfn9klzSiHgEPGGHs3+C2ci8F85g3AXmPMgDEmA9wHvGqG1zQbOCYifQDu1+PTfSEN/qcBEREcPXenMeaTM72emcYYc5sxZpUxZh1OEe+Hxph5m9UZY/qBgyJynnvoOmDHDC5pNnAAuFJE2t3/P9cxz4vgLt8BbnG/vwW4f7ovFAlkOcpUXA28B3hORJ5xj/2VMeZ7M7gmZXbxx8DdIhID9gDvm+H1zCjGmK0i8i3gKRy33NPMs1YPIvLvwGuBxSJyCPg/gY8D94rI+3EukL857dfX9g6KoijzD5V9FEVR5iEa/BVFUeYhGvwVRVHmIRr8FUVR5iEa/BVFUeYhGvwVRVHmIRr8FUVR5iH/P9Gg6Y2UZHlXAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "pl.plot(x.detach(), x.grad.detach())" ] @@ -1969,21 +1113,9 @@ }, { "cell_type": "code", - "execution_count": 181, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Parameter containing:\n", - "tensor([[-0.2067, -0.4198]], requires_grad=True)" - ] - }, - "execution_count": 181, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# w.x + b\n", "lin = torch.nn.Linear(2, 1, bias=True) # nn.Linear is a nn.Module\n", @@ -1992,20 +1124,9 @@ }, { "cell_type": "code", - "execution_count": 125, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "torch.nn.parameter.Parameter" - ] - }, - "execution_count": 125, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "type(lin.weight)" ] diff --git a/02-linear-regression.ipynb b/02-linear-regression.ipynb index 6c3dcd4..f74b15a 100644 --- a/02-linear-regression.ipynb +++ b/02-linear-regression.ipynb @@ -19,7 +19,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -28,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -56,20 +56,9 @@ }, { "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "np.random.seed(0)\n", "torch.manual_seed(0)" @@ -84,29 +73,9 @@ }, { "cell_type": "code", - "execution_count": 99, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(100, 1) (100,)\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX8AAAD4CAYAAAAEhuazAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAWcklEQVR4nO3de4xcZ3nH8d8zu3ZE2lQsdsDG60usXEQc9eIdmY3yD5QUnCqqISHFENFUxLiqjNq0/QPSSG4FioRoaWlVQ2XcCCo5SdM0yBG3XJCrSBUbvBMhaieYbpdsvLVFnM1SkIKy3p2nf+xMOh7P/cyZ9z1zvh8JvHvmzOyrEfzOe57znPeYuwsAkC+F0AMAAAwe4Q8AOUT4A0AOEf4AkEOEPwDk0GjoAXRq/fr1vm3bttDDAIBMKZVKr7j7lfXbMxP+27Zt0/T0dOhhAECmmNlco+2UfQAghwh/AMghwh8AcojwB4AcIvwBIIcIfwDIIcIfAHpQmlvUoeMzKs0thh5KTzLT5w8AsSjNLerOI1NaWi5r7WhBR/dNamLrWOhhdYWZPwB0aWp2QUvLZZVdurBc1tTsQughdY3wB4AuTW5fp7WjBY2YtGa0oMnt60IPqWuUfQCgSxNbx3R036SmZhc0uX1d5ko+EuEPAD2Z2DqWydCvouwDADlE+ANADhH+AJBDhD8A5BDhDwA5RPgDQA71JfzN7AEze9nMTtZse4uZPWVm/1X5d6zmtXvNbMbMTpvZ+/oxBgBA5/o18/+KpN112z4l6Tvufo2k71R+l5ldL2mvpB2V93zRzEb6NA4AQAf6Ev7u/oykV+s275H01crPX5X0/prtD7v76+7+Y0kzknb1YxwAgM6kWfN/m7ufk6TKv2+tbN8k6UzNfvOVbQCAAQlxwdcabPOGO5rtN7NpM5s+f/58ysMCgPxIM/x/YmYbJany78uV7fOSNtfsNy7pbKMPcPfD7l509+KVV16Z4lABIF/SDP/HJd1V+fkuScdqtu81s8vM7CpJ10j6XorjAADU6cuqnmb2kKR3SVpvZvOS/kLSZyU9YmZ3S3pJ0h2S5O6nzOwRSc9LWpZ0wN1X+jEOAEBn+hL+7v7hJi+9p8n+90u6vx9/GwDQPe7wBYAcIvwBoA9Kc4s6dHxGpbnF0EPpCE/yAoCESnOLuvPIlJaWy1o7WtDRfZPRP+WLmT8AJDQ1u6Cl5bLKLl1YLmtqdiH0kNoi/AEgocnt67R2tKARk9aMFjS5fV3oIbVF2QcAEprYOqaj+yY1NbugscvXvjHzj7n0Q/gDQAOluUVNzS5ocvu6jkK8uk9Wav+EPwDU6fUCbqPaf6zhT80fAOr0egE3S7V/Zv4AUKca4heWy12FeG3tv9NyUSjm3nA15egUi0Wfnp4OPQwAOdFtzT/WMZhZyd2L9duZ+QNAAxNbx4LO3NO+cYyaPwBEKO0bxwh/AIhQ2hePKfsAQITSvnhM+ANApNK87kDZBwByiPAHgBwi/AEEl7UHoQwDav4Agsrig1BiuAEsKcIfQFBZWgxNyubBqhHKPgCC6rafPXSJKItP7WqEmT+AoLrpZ49h1t3rom+xIfwBBNeun71aYz/7018ELxFlaeXOVgh/AFGrne2PFkyjIwWtrISddYde9K0fCH8AA9dNt0xtjX2l7PrQrs3a9OY3RTXrzmL3D+EPYKC6rdvX19hv3zkeVcDGcB2iF4Q/gIHqtrUz9hp71lpVqwh/AAPVS7dMzDX2rHb/8BhHAANXXyPvtGbebL/QNffQf78VHuMIIBq1M/lOa+bN9ouh5h7zmUkz3OELIKhO75httt+w3HE7aKnP/M3sRUk/l7Qiadndi2b2Fkn/ImmbpBcl/a67s5wfkEOd1syb7ZfVmntoqdf8K+FfdPdXarZ9TtKr7v5ZM/uUpDF3/2Srz6HmDwyvrNf8Y9as5h8q/E9Lepe7nzOzjZL+3d2va/U5hD8AdK9Z+A+i5u+SnjSzkpntr2x7m7ufk6TKv29t9EYz229m02Y2ff78+QEMFQDyYRDdPje5+1kze6ukp8zsh52+0d0PSzosrc780xogAPRTFspQqYe/u5+t/PuymX1N0i5JPzGzjTVln5fTHgcADEIMraedSLXsY2a/ZGZXVH+W9F5JJyU9Lumuym53STqW5jgAYFCy0nqa9sz/bZK+ZmbVv/Wgu3/bzE5IesTM7pb0kqQ7Uh4HgA5koVwRu6y0nqYa/u4+K+nXGmxfkPSeNP82gO4MolyRh4NL7AvRVbG8A5BjtWGc9uqUWamF90MWlnsg/IGcqg/jg7fuSLVckdWlj4cV4Q/kVH0YL762lGq5Iiu18Lwg/IGcahTGaZYrWtXC83AtIDaEP5BTtWE8dvnaN1oS+xG+zcK80cElT9cCYkL4AwlkfcZaHXM/w7fbMOdaQBis5w/0qBpyn3/ytO48MqXSXDZXJX/suXm9fqF/NyV1e5NTtfw0YuJawAAx8wd6NAwz1tLcov51+oyqC2eNjCQP324v7GalL37YEP5Aj4ahe2VqdkHL5dXoN0nv2HBF4s/sJcyz0Bc/bHiAO5BA1mv+tfX5sq8eAC5bw0XXYRJyPX9gaE1sHdOBd1+d2aCsztJvunq9TKsP34h5MTL0D+EP5NzE1jHdc/O1umwNF13zhJo/kHPV0tXv37hNp879TLfcsDGzZzLoHOEP5Fijmv+JF1/VdRuu4AAw5Cj7ADlW264qZavmX5pb1KHjM5m9vyI0Zv5AhiXtNqq2q1YPAAUlq/kPqvuJJSGSI/yBjOpHANav77P42lLPwT3IQB6GG+xCI/yBjKoNwKULZX3h6R/pnpuv7ekA0I/gTCuQG51NDMMNdqER/kBGjV2+VgUzubvKkv5j5hWdePHV1EsgzUo7aQRys7MJloRIjvAHMqg0t6hPf/2Uyu6yyt1ZgyiBtCrtpBHIrc4mWBIiGcIfSEHaFz5rQ7Fg0khh9Qwg7RJIu9JOvwOZ8k56CH+gz5rNjpMeEGrfXx+KB2/dkehibacGHcbtziayvrZSSIQ/0GfN1rNP0gnT6IASquZ9+85xeeXfQfzdZmcTtHsmQ/gDfdZodtxNJ0yj2Wyj93e7oFw/zjxqw/b2neNdf0Y/0e6ZDOEP9FmzUkUn5ZJms9mk5ZZ+zJJjC1uuByRD+AMpqC9V1B8QJOnQ8ZlLZuHNAjZpJ00/gju2sKXdMxnCHxiQaoi3moW3CtheOmmqpZ6xy9c2/NxuSkExhi3tnr0j/IEBa3ZBuBqqnZwhdKL+IFPfEdRLKYiwHR6EPzBg9bP7scvXXhLCB959deI6ff1BZvG1JR1499VNXw9dw8dgsaQzkEAvywpXyyd/+t7rdHTfpBZfW2p6JtBoe6eqB5lmT+dq9zqGGzN/5F6vLZBJZub15ZNG9fikF1jb1ehjrOFjcAh/5FqSAG9Xu+/mQNAohPsRzu1q9NTw8ytY+JvZbkl/J2lE0hF3/2yosSC/ktS9O6nd93om0G57rW7OXFgOAVVBwt/MRiQdkvRbkuYlnTCzx939+R
DjQX4lKa3Uz8xDXEDt5syl3b4cGPIl1Mx/l6QZd5+VJDN7WNIeSYQ/BippaaWT2n2aujngtNqXdXLyJ1T4b5J0pub3eUnvrN/JzPZL2i9JW7ZsGczIkDv9qnuHuIDazZlLq31p+8yfUOFvDbb5JRvcD0s6LEnFYvGS14HYDPoCajcHnFb7xrZ0A9IXKvznJW2u+X1c0tlAYwFSl2Y9vZsDTqsLy7R95kuo8D8h6Rozu0rS/0jaK+kjgcYCpCrGenqjgxFtn/kSJPzdfdnMPiHpCa22ej7g7qdCjAVI29Tsgl6/UJZLWroQvp4e48EIgxesz9/dvynpm6H+PoZH7C2KY5evfeOCVrnye0hc3IXEHb7IuCzMYhdfW1LBpLKvdjqcPPu/Xb2/3wc3Lu5CIvyRcVmYxU5uX6fRgmlpxeWSHi3NX/T821bh3uvBrdVn9uvibuxnXGiN8EemZWEWO7F1THcUN+vBZ1+SS1pZ+f+DVLtw7+Xg1skBI+nF3SyccaE1lnRGptUvjxxrAN22c1yXrbl0+eR2yzb3suxy0qWgOzGIv4F0MfNH5g2iRTFpiaNZqaXdmUsvJZpBnA1l4YwLrZl7Nm6cLRaLPj09HXoYyKFeShyhV9qsfXZv7aMb+4mafzaYWcndi/XbmfkDbXRbd+/2YJHGmUv189Ksy3NTWLZR8wfa6LbuHks9PJZxIE7M/IEmassaB2/doW+dPKdbbtjYdrYbSz08lnEgToQ/0MCDz76kg8dOaqXsKphkBVO57Drx4qu6bsMVPa+eOUixjANxIvyBOqW5RR08dlLL5dVmiBWv/lfnvfax1MNjGQfiQ80fqFGaW9QXnv7RG8Ffy9R5r/2glOYWdej4jEpzi6GHgoxh5g9U1Hbp1KqG/gcnxi9aliE07rJFEoQ/ohBDz3htd0zBpJuuXq9bbtj4Rp98dR9JUYRsFtY1QrwIfwQXywy2vjvmnpuvjfoB53TzIAnCH8HFMoNt1R3Ty41eaZ/J0M2DJAh/BBfTDLZZd0w3Y6xtE71sTbpnCXTzoFeEP4LLwgy20zHWt4nG8NhGoBHCH1GoncHGcPG3kU5m2VOzCyrXLJZYKBi1eESJ8EdqegnxGC+sdqNaHlpaLqtgpk/vuSFT40d+EP5IRa8hnvbF37TPKrJQwgIkwh8p6TXE07z4O6izCi7CIgsIf6Si1xBPc+YcS0spEAPCH6lIEuJpzJxLc4s6+9NfaLRgWil7ywNSrBecgX4i/JGaWMofteWe0ZGCPrRrc9M1elqVhjgoYJgQ/hh6teWelZWyNr35TU3Du1lpqDS3qA9/eeqNMtZDH89WFxJQjyWdkUgWlhTu5jGMzfZ97Ll5LS2X5ZKWlst67Ln5AY0eSAczf/QsaffMoMoo7a4/1I+j0b71q/tfuto/kC2EP3qWpHtm0DdzNbv+0Gwc9fvevnNcj06f0YUV15oR0+07x1MbKzAIhD96lqQnP5a2y07HMbF1TA/tv5ELvhgahD96lqSdM5aVPLsZRyzdS0A/mHs61Usz+0tJH5d0vrLpz939m5XX7pV0t6QVSX/k7k+0+7xisejT09OpjBVhDKrm3+7v0MKJYWZmJXcv1m9Pe+b/t+7+13UDuV7SXkk7JL1d0tNmdq27r6Q8FkSml5l0t0HdrKZf/zmEPvImRNlnj6SH3f11ST82sxlJuyR9N8BYkCG9XCRuVNOXNPCVQzm7QGzS7vP/hJn9wMweMLPq/+I3STpTs898ZRvQUrMgb6VR334vn5NE9aD1+SdP684jU1HfE4H8SDTzN7OnJW1o8NJ9kr4k6TNabYn+jKTPS/qYJGuwf8MLD2a2X9J+SdqyZUuSoWII9HKRuNlF6UFebI6lswmolSj83f3mTvYzsy9L+nrl13lJm2teHpd0tsnnH5Z0WFq94Nv7SDEMeu0uqq/pD3rN/Vg6m4BaaXb7bHT3c5Wf/0TSO919r5ntkPSgVuv8b5f0HUnXtLvgS7cPsoyaP0IJ0e3zOTP7da2WdF6U9AeS5O6nzOwRSc9LWpZ0gE4fDDs6ihCb1MLf3T/a4rX7Jd2f1t8GALTGqp4AkEOEPwDkEOGPoZGFZwsAsWBhtyGR926SQS8RDWQd4T8EshR8aR2kuJEK6A7hPwSyEnxpHqS4kQroDuE/BLISfGkepAZ91y6QdYT/EEgr+GpLNJISf37aBylupAI6l9ryDv3G8g6D9eCzL+ngsZNaKa8+s1ZmWl5JXq7J+4VpYNBCPcwFGVSaW9TBYye1XF6dGCytuEwuV/JyDbNzIA70+eMSU7MLKtecEVbXwq9dEx9AtjHzxyWqtfml5bIKZvr0nht03YYrKNcAQ4TwxyWaXUCmzg8MD8IfDfWrNp+lG9CAPKHmj1QN+nm5ADpD+CNVjR6gDiA8yj5IFXfeAnEi/JE6evuB+FD2AYAcIvzRFg9JAYYPZR+0RKsmMJyY+Q+hfs7UadUEhhMz/yHT75l6Vp4VAKA7hH9E+rEMQr8fmEKrJjCcCP9I1M7YR0cK+uDEuG7fOS6pu4eopDFTp1UTGD6EfyRqZ+xLy2U99OxLenT6TNcPUWGmDqAThH8kqjP21y+U5dLqg1NWVn/q9iEqzNQBtEO3TySqM/aPvHOL1o7Y6lo4I8ZDVACkgpl/RKoz9tt2jvf1wekAUI/wj1B92YbQB9BvlH0AIIcIfwDIoUThb2Z3mNkpMyubWbHutXvNbMbMTpvZ+2q2T5jZf1Ze+3szsyRjAAB0L+nM/6Sk2yQ9U7vRzK6XtFfSDkm7JX3RzEYqL39J0n5J11T+szvhGAAAXUoU/u7+grufbvDSHkkPu/vr7v5jSTOSdpnZRkm/4u7fdXeX9M+S3p9kDACA7qVV898k6UzN7/OVbZsqP9dvb8jM9pvZtJlNnz9/PpWBAkAetW31NLOnJW1o8NJ97n6s2dsabPMW2xty98OSDktSsVhsuh8AoDttw9/db+7hc+clba75fVzS2cr28QbbAQADlFbZ53FJe83sMjO7SqsXdr/n7uck/dzMJitdPr8nqdnZAwAgJUlbPT9gZvOSbpT0DTN7QpLc/ZSkRyQ9L+nbkg64+0rlbX8o6YhWLwL/t6RvJRkDAKB7ttp0E79isejT09OhhwEAmWJmJXcv1m/nDl8AyCHCHwByiPAHgBwi/AEghwh/AMghwh8AcojwB4AcIvwBIIcIfwDIIcIfAHKI8AeAHCL8ASCHCH8AyCHCHwByaOjDvzS3qEPHZ1SaWww9FACIRtvHOGZZaW5Rdx6Z0tJyWWtHCzq6b1ITW8dCDwsAghvqmf/U7IKWlssqu3Rhuayp2YXQQwKAKAx1+E9uX6e1owWNmLRmtKDJ7etCDwkAojDUZZ+JrWM6um9SU7MLmty+jpIPAFQMdfhLqwcAQh8ALjbUZR8AQGOEPwDkEOEPADlE+ANADhH+AJBDhD8A5JC5e+gxdMTMzkuaCz2OlK2X9EroQUSE7+NifB8X4/u4WLPvY6u7X1m/MTPhnwdmNu3uxdDjiAXfx8X4Pi7G93Gxbr8Pyj4AkEOEPwDkEOEfl
8OhBxAZvo+L8X1cjO/jYl19H9T8ASCHmPkDQA4R/gCQQ4R/ZMzsr8zsh2b2AzP7mpm9OfSYQjKzO8zslJmVzSy3bX1mttvMTpvZjJl9KvR4QjKzB8zsZTM7GXosMTCzzWZ23MxeqPx/5Y87eR/hH5+nJN3g7r8q6UeS7g08ntBOSrpN0jOhBxKKmY1IOiTpFknXS/qwmV0fdlRBfUXS7tCDiMiypD9z93dImpR0oJP/fRD+kXH3J919ufLrlKTxkOMJzd1fcPfToccR2C5JM+4+6+5Lkh6WtCfwmIJx92ckvRp6HLFw93Pu/lzl559LekHSpnbvI/zj9jFJ3wo9CAS3SdKZmt/n1cH/uZE/ZrZN0m9IerbdvkP/GMcYmdnTkjY0eOk+dz9W2ec+rZ7OHR3k2ELo5PvIOWuwjR5tXMTMflnSv0m6x91/1m5/wj8Ad7+51etmdpekWyW9x3NwI0a77wOal7S55vdxSWcDjQURMrM1Wg3+o+7+WCfvoewTGTPbLemTkn7H3V8LPR5E4YSka8zsKjNbK2mvpMcDjwmRMDOT9E+SXnD3v+n0fYR/fP5B0hWSnjKz75vZP4YeUEhm9gEzm5d0o6RvmNkTocc0aJUGgE9IekKrF/MecfdTYUcVjpk9JOm7kq4zs3kzuzv0mAK7SdJHJf1mJTO+b2a/3e5NLO8AADnEzB8AcojwB4AcIvwBIIcIfwDIIcIfAHKI8AeAHCL8ASCH/g9tO5cEnBSqEQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "from sklearn.datasets import make_regression\n", "\n", @@ -135,7 +104,7 @@ }, { "cell_type": "code", - "execution_count": 270, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -175,7 +144,7 @@ }, { "cell_type": "code", - "execution_count": 240, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -189,7 +158,7 @@ }, { "cell_type": "code", - "execution_count": 267, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -200,22 +169,9 @@ }, { "cell_type": "code", - "execution_count": 268, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfZRV9X3v8fdnnmFgGGAeQGaEwRAVNGAkhFRvE6NGk+aKK23uxVsrSU1JU1fT29WHq03b5HaFNqs395qkqem1aqMriYaVxEjTJksvebBJjGSMGlAkoCAMTzM8CMPTPH7vH2ePHoYDA3NmODNnf15rnXX2+e29z/5uWHzO5rf3/m1FBGZmlg4lhS7AzMzOH4e+mVmKOPTNzFLEoW9mliIOfTOzFHHom5mliEPfrAhJ+qGkjxS6Dht7HPo2ZkjaJum6QtdhVswc+mYjSFJZoWswOxOHvo0Lkn5P0hZJByStkXRB0i5Jd0tql3RI0i8lXZbMe5+kFyV1Stop6U9P890fkvQTSf+QfMdLkq7Nmj9F0v2Sdiff82lJpYPWvVvSAeBTOb6/RNKdkl6WtF/SaknTknlzJIWklZJ2Jdv4k6x1KyV9Lpm3K5muzJq/TNJzkg4n339j1qZnJ7V1SnpcUl1efwlWFBz6NuZJejfwd8B/AWYCrwKPJLPfA/w68GagFvivwP5k3v3ARyNiMnAZ8P0zbObtwCtAHfBJ4FsDwQw8CPQCbwKuSLb5kRzrNgCrcnz3x4GbgXcCFwAHgX8ctMw1wLzku+/M6ub6BLAUWAQsBJYAf5n8uSwBHgL+LNn3Xwe2ZX3nfwM+nNRVAeT80bOUiQi//BoTLzKBdV2O9vuBv8/6PAnoAeYA7wZ+RSYYSwattx34KFAzxHY/BOwClNW2DvgdoBHoAiZkzbsF+EHWutuH+P6NwLVZn2cm9Zcl+xDAJVnz/x64P5l+GXhf1rwbgG3J9P8F7j7NNn8I/GXW5z8Avlfov2O/Cv/ykb6NBxeQOboHICKOkDmanxUR3we+SObIea+keyXVJIv+JvA+4FVJP5L0jjNsY2dEZI8++Gqy3dlAObBb0muSXiMTtg1Zy+4Yov7ZwKNZ628E+sj8oOT6joFtn7Lvg+Y1k/lROJ09WdPHyPxYWso59G082EUmOAGQVA1MB3YCRMQXIuJKYAGZbp4/S9p/HhHLyAT0t4HVZ9jGLEnK+nxhst0dZI706yKiNnnVRMSCrGWHGqp2B/DerPVrI6IqInZmLdOcY9un7PugeTuAi4bYttlJHPo21pRLqsp6lQFfAz4saVFyEvNvgacjYpukt0l6u6Ry4ChwAuiTVCHptyVNiYge4DCZo+vTaQA+Lqlc0geBS4F/j4jdwOPA/5ZUk5yUvUjSO89hn/4JWCVpNoCkeknLBi3zV5ImSlpAph/+60n7w8BfJuvUAX8NfCWZd3/y53JtUtcsSZecQ12WQg59G2v+HTie9fpURKwF/gr4JrCbzNHt8mT5GuCfyZwcfZVMt89nk3m/A2yTdBj4feDWM2z3aTInUveRORn7WxExcEL4NjInQl9MtvMNMv3yZ+vzwBrgcUmdwM/InPzN9iNgC7AW+GxEPJ60fxpoBX4JrAd+kbQREevI/EDcDRxKvmM2Zmegk7sxzdJH0oeAj0TE1QXY9hxgK1AeEb3ne/uWPj7SNzNLEYe+mVmKuHvHzCxFfKRvZpYiY35wqLq6upgzZ06hyzAzG1eeeeaZfRFRP7h9zIf+nDlzaG1tLXQZZmbjiqRXc7W7e8fMLEUc+mZmKTJk6Et6IBmrfMOg9j+UtEnSC5L+Pqv9rmTc802Sbshqv1LS+mTeFwaNc2JmZufB2RzpfxnIfjADkq4BlgFvSQae+mzSPp/M7fELknXuGXjYBPAlYCWZW93nDf5OMzMbfUOGfkQ8CRwY1Pwx4DMR0ZUs0560LwMeiYiuiNhKZiyRJZJmkhnT/Klk+NqHyDxUwszMzqPh9um/GfhPkp5Oxil/W9I+i5PHBW9L2mYl04Pbc0oeHdcqqbWjo2OYJZqZ2WDDDf0yYCqZpxX9GbA66aPP1U8fZ2jPKSLujYjFEbG4vv6Uy0zNzGyYhhv6bcC3ImMd0E/m2aJtnPwwiCYyD3xoS6YHt4+aB3+6jX99flQ3YWY27gw39L9N5tmkSHozmbHG95EZM3y5pEpJLWRO2K5LHkTRKWlp8j+C24DH8q7+DB75+Q4ee27n0AuamaXIkHfkSnoYeBdQJ6kN+CTwAPBAchlnN7AiOUH7gqTVZB420QvcEREDTyv6GJkrgSYA301eo6axppI9h0+M5ibMzMadIUM/Im45zaycTyGKiFVknjw0uL0VuOycqsvDjJoqXth1+HxtzsxsXCjaO3IbaqrYd6SLnr7+QpdiZjZmFG3oz6ipIgL2HekqdClmZmNG0YZ+Y00lAHsOuV/fzGxAEYd+FQB7D/tI38xsQApC30f6ZmYDijb0p1dXUFYih76ZWZaiDf2SEtEw2dfqm5llK9rQB2icUkW7+/TNzF5X3KE/ucpH+mZmWYo69GdMqXKfvplZlqIO/YaaSjpP9HKsu7fQpZiZjQlFHfozfK2+mdlJijr0B67V9125ZmYZqQh99+ubmWUUeehnxt9x6JuZZRR16E+uKqe6otSXbZqZJYo69ME3aJmZZSv+0PcNWmZmryv60PcNWmZmbyj60G+oqaT9cBeZ57abmaVb0Yf+jJoquvv6OX
[base64 PNG data of a matplotlib figure output omitted]
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "plt.plot(loss_history)\n", "plt.title('Loss per epoch');" @@ -223,22 +179,9 @@ }, { "cell_type": "code", - "execution_count": 269, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfXxcVb3v8c9vJkkr8hQKSGlKU6TQUqClSWuxVAGL8mQRCoeC0lbl4SDeq0de3AMHBUU5elTOBY/1cBC0KAUOaKEVEHn0WoGQJrSVPkopqYlUKSFAe4EmmVnnjz2TTiYzyWSe9p6Z7/v16iuZPXsyK1G+e81vrb2WOecQEZHKEvK7ASIiUnwKfxGRCqTwFxGpQAp/EZEKpPAXEalACn8RkQqk8BcRqUAKfylpZtZmZt1mdmDS8TVm5sysPva4zsx+bWZvmNnbZvaSmS2KPVcfO3dX0r8LMmzD982s3czeMbNtZnZd0vO3m9lmM4vG3zPhuduS3nO3me0c5L3CZvYdM3vNzHaa2Woz2z+TdookUvhLOXgVuDD+wMyOBT6QdM4vgXZgHDAKWAD8Pemc/Z1zeyf8++8M3/9OYKJzbl/go8BFZnZuwvNrgS8BLya/0Dn3j4nvCdwLPDDIe30r9h4nAPsCFwPvZ9hOkT5VfjdAJA9+iRfm/xF7vBD4BfCdhHOmA//knPv/scer8/XmzrnNSYeiwBEJzy8GMLNBQ9rMPgjMA85K83wt8FVginNuW+zwuiybLRVOPX8pB03AvmY2yczCwAXA3SnOWWxm883ssOH8cDO7yMz+NMQ515jZLqAD+CBwz3DeI2YesAP4Q5rnjwV6gfPM7G9m9mczuzKL9xFR+EvZiPf+TwU2AX9Nev58YCXwDeDV2JjA9KRz3jCztxL+TQJwzt3jnDtusDd3zn0P2AeYFmvL21n8DguBX7j0C27VAfsBRwLjgfOAb5rZqVm8l1Q4hb+Ui18CFwGL8Eo+/Tjnupxz1zjnJgMfAtYAD5mZJZx2oHNu/4R/G4fTAOdZDbyHV5vPmJmNBT6equ0J3ot9vdE5955z7k/AfcAZw3kvEVD4S5mI1cBfxQvCZUOc+wbwQ+BQ4IACNKcK+PAwX7MAeM45t3WQc+KlJy3FKzlT+Es5+SJwSsKgbh8z+zczO8bMqsxsH+AKYItzrjOXNzSzkJldbma15pkBXAk8lXBOjZmNBAyoNrORZpb8394CYMlg7+WcewWvdHWdmY2IlaUuAB7O5XeQyqTwl7LhnHvFOdeS5um9gAeBt4CteFM+5yad81bSnPuvAZjZZ81s/SBvfQ7wCrATb6D5P9gz8wjgcbySzUeB22Pffyz+pJmdgFfPHzDF08x+a2b/knDowljbO4FHgG84555Kfp3IUEybuYiIVB71/EVEKpDCX0SkAin8RUQqkMJfRKQClczaPgceeKCrr6/3uxkiIiWltbX1DefcQcnHSyb86+vraWlJN4tPRERSMbNtqY6r7CMiUoEU/iIiFUjhLyJSgUqm5p9KT08PHR0dvP9+eW9kNHLkSOrq6qiurva7KSJSJko6/Ds6Othnn32or6+n/8q85cM5R2dnJx0dHYwfP97v5ohImSjpss/777/PqFGjyjb4AcyMUaNGlf2nGxEprpIOf6Csgz+uEn5HkVLTuq2Lxc9soXVbl99NyUpJl31ERPzQuq2Lz97RRHdvlJqqEEsvmUnDuFq/mzUsJd/zD5JvfvOb/PCHP0z7/EMPPcSGDRuK2CIRKYSmrZ1090aJOujpjdK0Nac9gXyh8C8ihb9IeZh5+ChqqkKEDaqrQsw8fJTfTRq2igv/fNfpbrrpJo466ijmzJnD5s2bAfjpT3/K9OnTmTJlCvPmzePdd9/lueeeY8WKFVx99dVMnTqVV155JeV5IhJ8DeNqWXrJTL72yaNKsuQDFRb+8TrdzY9v5rN3NOV8AWhtbeW+++5j9erVLFu2jFWrVgFw7rnnsmrVKtauXcukSZO48847+ehHP8rcuXP5wQ9+wJo1a/jwhz+c8jwRKQ0N42q58uQjSjL4ocIGfFPV6XL5H27lypWcc8457LXXXgDMnettCbtu3Tq+/vWv89Zbb7Fr1y4+9alPpXx9pueJiORbRfX8C1GnSzUNc9GiRfz4xz/mpZde4oYbbkg7Rz/T80RE8q2iwj/fdbqPfexjPPjgg7z33nvs3LmT3/zmNwDs3LmT0aNH09PTw9KlS/vO32effdi5c2ff43TniYgUWkWVfcC7AOSrRjdt2jQuuOACpk6dyrhx45g9ezYA3/72t/nIRz7CuHHjOPbYY/sCf/78+Vx66aX86Ec/4le/+lXa80RECs2cc363ISONjY0ueTOXjRs3MmnSJJ9aVFyV9LuKSP6YWatzrjH5eEWVfURExJOX8Dezn5nZ62a2LuHYAWb2hJm9HPtam/DctWa2xcw2m5mmuIiIFFm+ev5LgNOSjl0DPOWcmwA8FXuMmR0NzAcmx17zEzML56kdIiKSgbyEv3PuD8CbSYfPBu6KfX8X8JmE4/c553Y7514FtgAz8tEOERHJTCFr/h9yzm0HiH09OHZ8DNCecF5H7JiIiBSJHwO+qRanTznlyMwuM7MWM2vZsWNHgZslIlI5Chn+fzez0QCxr6/HjncAYxPOqwNeS/UDnHO3O+canXONBx10UAGbGhx77723300QkQpQyPBfASyMfb8QWJ5wfL6ZjTCz8cAEoLmA7fBdJBLxuwkiIv3ka6rnvcDzwFFm1mFmXwS+B5xqZi8Dp8Ye45xbD9wPbAAeA650zhUvHdubYeXN3tc8aGtrY+LEiSxcuJDjjjuO8847j3fffZf6+npuvPFGTjzxRB544AFeeeUVTjvtNBoaGpg9ezabNm0C4NVXX+WEE05g+vTpfOMb38hLm0REhpKX5R2ccxemeeoTac6/CbgpH+89LO3NcNdciHRDuAYWroCxuU802rx5M3feeSezZs3iC1/4Aj/5yU8AGDlyJH/84x8B+MQnPsFtt93GhAkTeOGFF/jSl77E008/zVe+8hWuuOIKFixYwOLFi3Nui4hIJirrDt+2lV7wu4j3tW1lXn7s2LFjmTVrFgCf+9zn+gL/ggsuAGDXrl0899xznH/++UydOpXLL7+c7du3A/Dss89y4
[base64 PNG data of a matplotlib figure output omitted]
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "# Vis\n", "fig, ax = plt.subplots()\n", @@ -257,7 +200,7 @@ }, { "cell_type": "code", - "execution_count": 277, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -306,7 +249,7 @@ }, { "cell_type": "code", - "execution_count": 278, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -316,7 +259,7 @@ }, { "cell_type": "code", - "execution_count": 279, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -328,22 +271,9 @@ }, { "cell_type": "code", - "execution_count": 280, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de3gd9X3n8fdHd1u2LGPJsrGELRNzMVCboBBayIUA4bJZoGnTmm0CSck6Scnm0ssuJO2W3Se0fbpJSdM0ZAlQ4GmAsCEUmkAeqHMhBAKRwWBzi21ssGxjyzbY8k3X7/5xRjDIx5asi4905vN6nvOcOb+ZOfMd8/A5o9/M/EYRgZmZZUNJoQswM7Mjx6FvZpYhDn0zswxx6JuZZYhD38wsQxz6ZmYZ4tA3K0KSfibpk4Wuw8Yfh76NG5LWSzq30HWYFTOHvtkoklRW6BrMDsWhbxOCpP8qaY2kHZLul3R00i5J10vaKmmnpGclnZzMu0jS85I6JG2U9OcH+e6PS/qlpH9KvuNFSeek5k+TdLOkzcn3fEVS6YB1r5e0A7g2z/eXSLpa0lpJ2yXdLemoZN48SSFpqaRNyTb+LLVupaSvJ/M2JdOVqfmXSFohaVfy/RekNj03qa1D0kOS6kb0H8GKgkPfxj1JHwD+FvgDYDbwCnBXMvuDwHuB44Ba4A+B7cm8m4FPRcRU4GTgJ4fYzLuBl4E64K+BH/QHM3Ab0AO8Azg12eYn86w7E7guz3d/DrgUeB9wNPA68M8DljkbWJB899Wpbq4vA2cAi4FFwOnAXyb/LqcDtwN/kez7e4H1qe/8L8AnkroqgLw/epYxEeGXX+PiRS6wzs3TfjPw96nPU4BuYB7wAeA35IKxZMB6rwKfAmoG2e7HgU2AUm1PAh8DGoBOYFJq3mXAT1PrvjrI978AnJP6PDupvyzZhwBOSM3/e+DmZHotcFFq3vnA+mT6/wLXH2SbPwP+MvX5T4AfF/q/sV+Ff/lI3yaCo8kd3QMQEbvJHc3PiYifAN8kd+S8RdKNkmqSRX8PuAh4RdLPJf32IbaxMSLSow++kmx3LlAObJb0hqQ3yIXtzNSyGwapfy5wb2r9F4Becj8o+b6jf9sH7PuAeU3kfhQO5rXU9F5yP5aWcQ59mwg2kQtOACRVAzOAjQAR8Y2IOA04iVw3z18k7b+OiEvIBfS/AXcfYhtzJCn1+ZhkuxvIHenXRURt8qqJiJNSyw42VO0G4MLU+rURURURG1PLNOXZ9gH7PmDeBuDYQbZt9jYOfRtvyiVVpV5lwB3AJyQtTk5i/g3wRESsl/QuSe+WVA7sAfYDvZIqJP2RpGkR0Q3sInd0fTAzgc9JKpf0EeBE4IGI2Aw8BHxNUk1yUvZYSe87jH36NnCdpLkAkuolXTJgmb+SNFnSSeT64b+XtN8J/GWyTh3wP4F/TebdnPy7nJPUNUfSCYdRl2WQQ9/GmweAfanXtRGxDPgr4B5gM7mj2yXJ8jXAd8idHH2FXLfPV5N5HwPWS9oFfBr46CG2+wS5E6nbyJ2M/f2I6D8hfDm5E6HPJ9v5Prl++aH6R+B+4CFJHcCvyJ38Tfs5sAZYBnw1Ih5K2r8CtALPAiuBp5I2IuJJcj8Q1wM7k++Yi9kh6O3dmGbZI+njwCcj4qwCbHsesA4oj4ieI719yx4f6ZuZZYhD38wsQ9y9Y2aWIT7SNzPLkHE/OFRdXV3Mmzev0GWYmU0oy5cv3xYR9QPbx33oz5s3j9bW1kKXYWY2oUh6JV+7u3fMzDLEoW9mliEOfTOzDHHom5lliEPfzCxDHPpmZhni0Dczy5CiDf3bHlvPvz+zafAFzcwypGhD/65fb+C+FRsHX9DMLEOKNvQbaip5bdf+QpdhZjauFG3oz6qpYsuuzkKXYWY2rhRt6M+sqWLb7k66e/sKXYqZ2bhRtKE/q6aKCNi220f7Zmb9ijb0G2oqAXhtp/v1zcz6FXHoVwG4X9/MLCUDoe8jfTOzfkUb+jOqKygrkUPfzCylaEO/pETMnOpr9c3M0oo29AEaplWx1X36ZmZvKu7Qn1rlI30zs5RBQ19Sk6SfSnpB0nOSPp+0HyXpYUmrk/fpqXWukbRG0kuSzk+1nyZpZTLvG5I0NruVM2talfv0zcxShnKk3wP8WUScCJwBXCVpIXA1sCwiFgDLks8k85YAJwEXAN+SVJp81w3AUmBB8rpgFPflADNrKunY38Perp6x3IyZ2YQxaOhHxOaIeCqZ7gBeAOYAlwC3JYvdBlyaTF8C3BURnRGxDlgDnC5pNlATEY9HRAC3p9YZE7N8rb6Z2dscVp++pHnAqcATQENEbIbcDwMwM1lsDrAhtVpb0jYnmR7Ynm87SyW1Smptb28/nBLfpv9afd+Va2aWM+TQlzQFuAf4QkTsOtSiedriEO0HNkbcGBEtEdFSX18/1BIP4Bu0zMzebkihL6mcXOB/NyJ+kDRvSbpsSN63Ju1tQFNq9UZgU9LemKd9zPSPv+PQNzPLGcrVOwJuBl6IiH9IzbofuCKZvgK4L9W+RFKlpGZyJ2yfTLqAOiSdkXzn5al1xsTUqnKqK0p92aaZWaJsCMucCXwMWClpRdL2JeDvgLslXQm8CnwEICKek3Q38Dy5K3+uiojeZL3PALcCk4AHk9eY8g1aZmZvGTT0I+JR8vfHA5xzkHWuA67L094KnHw4BY6Ub9AyM3tLUd+RC75By8wsrehDf2ZNJVt3dZK7NcDMLNuKPvRn1
VTR1dvH63u7C12KmVnBFX3o+wYtM7O3ZCb0t3Q49M3MMhD6yQ1aPtI3Myv+0J851YOumZn1K/rQrygrYUZ1ha/VNzMjA6EPuX79rQ59M7OshL4fkG5mBhkJ/dxdue7TNzPLROjPnFrF9j2ddPf2FboUM7OCykToz5pWRQS0d/ho38yyLROh33+tvvv1zSzrMhL6uWv1fQWPmWVdpkLf4++YWdZlIvSPmlxBeanY4j59M8u4TIR+SYmYObXK4++YWeYN5cHot0jaKmlVqu17klYkr/X9z86VNE/SvtS8b6fWOU3SSklrJH0jeTj6EdNQU+mRNs0s84byYPRbgW8Ct/c3RMQf9k9L+hqwM7X82ohYnOd7bgCWAr8CHgAu4Ag8GL1fQ00Vv9nScaQ2Z2Y2Lg16pB8RjwA78s1Ljtb/ALjzUN8haTZQExGPR+65hbcDlx5+ucOXG3/Hffpmlm0j7dN/D7AlIlan2polPS3p55Lek7TNAdpSy7QlbXlJWiqpVVJre3v7CEvMaaipoqOzhz2dPaPyfWZmE9FIQ/8y3n6Uvxk4JiJOBf4UuENSDZCv//6gTyqPiBsjoiUiWurr60dYYs6sacnDVHytvpll2LBDX1IZ8GHge/1tEdEZEduT6eXAWuA4ckf2janVG4FNw932cDQkD1PxXblmlmUjOdI/F3gxIt7stpFUL6k0mZ4PLABejojNQIekM5LzAJcD941g24etYVr/Xbnu1zez7BrKJZt3Ao8Dx0tqk3RlMmsJB57AfS/wrKRngO8Dn46I/pPAnwFuAtaQ+wvgiF25A6m7cn2kb2YZNuglmxFx2UHaP56n7R7gnoMs3wqcfJj1jZoplWVMqSxzn76ZZVom7sjtN7Om0qFvZpmWqdCfVeMnaJlZtmUq9BtqqjzSppllWuZCf2vHfnI3BZuZZU/GQr+S7t5gx56uQpdiZlYQmQr9Wcllm+7XN7OsylToz3wz9N2vb2bZlKnQnzXNoW9m2Zap0K+fkht0zXflmllWZSr0K8pKqJtS4T59M8usTIU+kHtWro/0zSyjMhf6s6Y59M0suzIX+rOnVdH2+r5Cl2FmVhCZC/3mump27uvmdd+gZWYZlLnQn19fDcDL2/YUuBIzsyMvc6HfXDcFgHUOfTPLoMyFfuP0SZSViHXbdhe6FDOzIy5zoV9eWsIxR032kb6ZZdJQnpF7i6Stklal2q6VtFHSiuR1UWreNZLWSHpJ0vmp9tMkrUzmfSN5QHpBNNdV83K7Q9/MsmcoR/q3Ahfkab8+IhYnrwcAJC0k98D0k5J1viWpNFn+BmApsCB55fvOI6K5rpr12/fQ1+dx9c0sWwYN/Yh4BNgxxO+7BLgrIjojYh2wBjhd0mygJiIej9wTTG4HLh1u0SPVXF/N/u4+j8FjZpkzkj79z0p6Nun+mZ60zQE2pJZpS9rmJNMD2/OStFRSq6TW9vb2EZSYX3Nd7rJN9+ubWdYMN/RvAI4FFgObga8l7fn66eMQ7XlFxI0R0RIRLfX19cMs8eDmJ5dt+lp9M8uaYYV+RGyJiN6I6AO+A5yezGoDmlKLNgKbkvbGPO0F0VBTyaTyUtb5ZK6ZZcywQj/po+/3u0D/lT33A0skVUpqJnfC9smI2Ax0SDojuWrncuC+EdQ9IpJorqv2tfpmljllgy0g6U7g/UCdpDbgr4H3S1pMrotmPfApgIh4TtLdwPNAD3BVRPQmX/UZclcCTQIeTF4F01xfzXMbdxayBDOzI27Q0I+Iy/I033yI5a8DrsvT3gqcfFjVjaH5ddX8eNVrdPX0UVGWuXvUzCyjMpt2zXXV9PYFG17fW+hSzMyOmEyHPuCTuWaWKQ59X7ZpZhmS2dCvnVzBUdUVvlbfzDIls6EP+LJNM8sch76P9M0sQzIf+lt2dbKns6fQpZiZHRGZDv35PplrZhmT6dBv9kPSzSxjMh3682b4Wn0zy5ZMh35VeSlzaif5Ch4zy4xMhz7A/HpfwWNm2ZH50G+uq+blbXvIPcXRzKy4OfTrqunY38P2PV2FLsXMbMw59H3ZppllSOZDv/95ub6Cx8yyIPOhP2f6JMpL5Wv1zSwTMh/6pSVi7gwPvGZm2TBo6Eu6RdJWSatSbf9H0ouSnpV0r6TapH2epH2SViSvb6fWOU3SSklrJH0jeUD6uOCB18wsK4ZypH8rcMGAtoeBkyPit4DfANek5q2NiMXJ69Op9huApcCC5DXwOwtmfl0167fvpbfPl22aWXEbNPQj4hFgx4C2hyKif2jKXwGNh/oOSbOBmoh4PHIXxN8OXDq8kkdfc101XT19bHpjX6FLMTMbU6PRp//HwIOpz82Snpb0c0nvSdrmAG2pZdqStrwkLZXUKqm1vb19FEo8NF+2aWZZMaLQl/RloAf4btK0GTgmIk4F/hS4Q1INkK///qB9KRFxY0S0RERLfX39SEockv7RNh36Zlbsyoa7oqQrgA8B5yRdNkREJ9CZTC+XtBY4jtyRfboLqBHYNNxtj7b6KZVMqSxz6JtZ0RvWkb6kC4D/AVwcEXtT7fWSSpPp+eRO2L4cEZuBDklnJFftXA7cN+LqR4mkN8fgMTMrZkO5ZPNO4HHgeEltkq4EvglMBR4ecGnme4FnJT0DfB/4dET0nwT+DHATsAZYy9vPAxScH5JuZlkwaPdORFyWp/nmgyx7D3DPQea1AicfVnVHUHNdNf/+7Cb2d/dSVV5a6HLMzMZE5u/I7Xf8rKlEwEuvdRS6FDOzMePQTyxqqgXgmbY3ClyJmdnYcegnjp5WRf3USla86tA3s+Ll0E9IYlFjLSs2OPTNrHg59FNOPaaWl7ftYefe7kKXYmY2Jhz6KYsa3a9vZsXNoZ/yW03TkHAXj5kVLYd+Sk1VOcfWT+EZh76ZFSmH/gD9J3OT4YTMzIqKQ3+AxcfUsn1PF22ve2x9Mys+Dv0BTk1u0nK/vpkVI4f+AMfPmkplWYlD38yKkkN/gPLSEk6eM82hb2ZFyaGfx+KmWlZt3El3b1+hSzEzG1UO/TwWNdXS2dPnETfNrOg49PPoP5n7tLt4zKzIOPTzaJw+iaOqK3yTlpkVHYd+HpJY3OQRN82s+AzlGbm3SNoqaVWq7ShJD0tanbxPT827RtIaSS9JOj/Vfpqklcm8byQPSB+3FjfVsrZ9N7v2e8RNMyseQznSvxW4YEDb1cCyiFgALEs+I2khsAQ4KVnnW5L6Hzh7A7AUWJC8Bn7nuLKoqZYIWNm2s9ClmJmNmkFDPyIeAXYMaL4EuC2Zvg24NNV+V0R0RsQ6YA1wuqTZQE1EPB65QW1uT60zLi1u9J25ZlZ8htun3xARmwGS95lJ+xxgQ2q5tqRtTjI9sH3cmja5nPl11TztxyeaWREZ7RO5+frp4xDt+b9EWiqpVVJre3v7qBV3uBY1ecRNMysuww39LUmXDcn71qS9DWhKLdcIbEraG/O05xURN0ZES0S01NfXD7PEkVvcVMu23Z1s2rm/YDWYmY2m4Yb+/cAVyfQVwH2p9iWSKiU1kzth+2TSBdQh6Yzkqp3LU+uMW4v6
R9x0F4+ZFYmhXLJ5J/A4cLykNklXAn8HnCdpNXBe8pmIeA64G3ge+DFwVUT0Jl/1GeAmcid31wIPjvK+jLoTZ0+lorTEz8w1s6JRNtgCEXHZQWadc5DlrwOuy9PeCpx8WNUVWGVZKQuPrvGRvpkVDd+RO4jFTbWs3LiTHo+4aWZFwKE/iMVNtezr7uWlLR5x08wmPof+IFrm5UaYeGzN9gJXYmY2cg79QTROn8wJs6by8AtbCl2KmdmIOfSH4LyFDbSu38GOPV2FLsXMbEQc+kNw7okN9AX89MWtgy9sZjaOOfSH4JQ502ioqeTh593FY2YTm0N/CEpKxDknNvDI6nb2d/cOvoKZ2Tjl0B+i8xY2sLerl8df9lU8ZjZxOfSH6Lfnz2ByRam7eMxsQnPoD1FVeSnvO66eZS9soa/PQy2b2cTk0D8M557YwJZdnazc6EcomtnE5NA/DGefMJMSwX/4Ri0zm6Ac+ofhqOoKWuYd5X59M5uwHPqH6bwTG3jxtQ427Nhb6FLMzA6bQ/8wnbuwAXAXj5lNTA79w9RcV807Zk5xF4+ZTUgO/WE4b2EDT6zbwc693YUuxczssDj0h+HcExvo7Qt+9hsPwGZmE8uwQ1/S8ZJWpF67JH1B0rWSNqbaL0qtc42kNZJeknT+6OzCkXdqUy11UyrcxWNmE86gD0Y/mIh4CVgMIKkU2AjcC3wCuD4ivppeXtJCYAlwEnA08B+SjouICTeCWUmJOOeEBh5YuZmunj4qyvwHk5lNDKOVVucAayPilUMscwlwV0R0RsQ6YA1w+iht/4g7b2EDHZ09PLHOA7CZ2cQxWqG/BLgz9fmzkp6VdIuk6UnbHGBDapm2pO0AkpZKapXU2t7ePkoljq4z31HH5IpS/u3pTYUuxcxsyEYc+pIqgIuB/5c03QAcS67rZzPwtf5F86yed+SyiLgxIloioqW+vn6kJY6JSRWl/P5pjfz7M5vY2rG/0OWYmQ3JaBzpXwg8FRFbACJiS0T0RkQf8B3e6sJpA5pS6zUCE/ow+RNnNtPV28e//urVQpdiZjYkoxH6l5Hq2pE0OzXvd4FVyfT9wBJJlZKagQXAk6Ow/YJprqvmnBNm8t1fveInapnZhDCi0Jc0GTgP+EGq+e8lrZT0LHA28EWAiHgOuBt4HvgxcNVEvHJnoCvPamb7ni7uXzGh/2gxs4xQxPh+IEhLS0u0trYWuoyDiggu/MdfAPDg59+DlO/UhZnZkSVpeUS0DGz3BeYjJIk/PquZF1/r4LG1vnzTzMY3h/4ouHjR0dRNqeDmR9cVuhQzs0Ny6I+CqvJS/ujdc/nJi1tZ27670OWYmR2UQ3+UfPSMuVSUlnDrL9cXuhQzs4Ny6I+S+qmVXLL4aL6/vI039nYVuhwzs7wc+qPoE2c2s6+7lzuf3DD4wmZmBeDQH0ULj67hd46dwe2Pr6e7t6/Q5ZiZHcChP8quPKuZzTv38+Cq1wpdipnZARz6o+zs42fSXFfNPy1bTVePj/bNbHxx6I+ykhLx5YtOZPXW3XznFy8Xuhwzs7dx6I+Bcxc2cNEps/jHZatZt21PocsxM3uTQ3+MXPufT6KyrIQv/WAl4318IzPLDof+GJlZU8XVF57A4y9v556nNha6HDMzwKE/pi571zG0zJ3OV370PNt3dxa6HDMzh/5YKikRf/vhU9jT2cNXfvRCocsxM3Poj7UFDVP5zPuO5d6nN/KL1ePzIe9mlh0O/SPgT85+B/PrqvnyvavY1zXhHxZmZhOYQ/8IqCov5W8+fAqv7tjL15f9ptDlmFmGjfQZueuT5+GukNSatB0l6WFJq5P36anlr5G0RtJLks4fafETyRnzZ7DkXU3c+MjL/PBZP0/XzApjNI70z46IxalnMV4NLIuIBcCy5DOSFgJLgJOAC4BvSSodhe1PGNdefBItc6fzxe+t4JdrthW6HDPLoLHo3rkEuC2Zvg24NNV+V0R0RsQ6YA1w+hhsf9yqKi/lpsvfxfy6KSy9vZVVG3cWuiQzy5iRhn4AD0laLmlp0tYQEZsBkveZSfscID3QfFvSdgBJSyW1Smptby+uK16mTS7n9itPp3ZyBR//lydZ72EazOwIGmnonxkR7wQuBK6S9N5DLKs8bXnHJ4iIGyOiJSJa6uvrR1ji+NNQU8XtV55Ob19w+S1PsrVjf6FLMrOMGFHoR8Sm5H0rcC+57potkmYDJO9bk8XbgKbU6o1AZs9oHls/hX/5xOm0d3RyxS2/Ztf+7kKXZGYZMOzQl1QtaWr/NPBBYBVwP3BFstgVwH3J9P3AEkmVkpqBBcCTw91+MVjcVMsNH30nq7d0sPT2VnZ39hS6JDMrciM50m8AHpX0DLnw/lFE/Bj4O+A8SauB85LPRMRzwN3A88CPgasiIvN3Kr3/+Jl89SOLeHLdDi7+5qO89FpHoUsysyKm8T7sb0tLS7S2tha6jDH3q5e389k7nmZPZw9/8+GT+d1TGwtdkplNYJKWpy6lf5PvyB0nzpg/gwc+dxanNE7ji997hi/du5L93Zn/Q8jMRplDfxyZWVPFHZ98N59633zueOJVPvLtx9mwY2+hyzKzIuLQH2fKSku45sITufFjp7F++x4+9E+P8t0nXqG71w9ZN7ORc+iPUx88aRY//G9ncVzDFL587yo+eP0j/OjZzX70opmNiEN/HJs7o5q7P/Xb3HR5C+Wl4qo7nuLib/6SR1d73B4zGx6H/jgniXMXNvDg59/LVz+yiB17uvjozU/w0Zue4LE12+jr85G/mQ2dL9mcYPZ39/LdJ17lmz9Zzet7u5lTO4nfe+ccPvzORubVVRe6PDMbJw52yaZDf4La19XLQ8+/xveXt/Homm1EwLvmTef33tnIhafMZtqk8kKXaGYF5NAvYpt37uPepzdyz/I21rbvoURwSmMtv3PsDM48to7T5k5nUkWmHl1glnkO/QyICJ5p28lPXtzKY2u2sWLDG/T0BRWlJZx6TC3vnj+DhbNrOHH2VJqmT6akJN/Ap2ZWDBz6GbSns4dfr9/BY2u389jabTy3aRf9/7mrK0o5ftZUTphdwwmzptJ01GSapk+mcfokqsr9V4HZROfQN/Z19fKbLR28+NouXtjcwQubd/Hiax3s3Pf2YZ3rplTSOH0SjdMnMXNqFTOmVFA/pZIZUyqYMaWSuikV1E6uoLqiFMl/LZiNRwcL/bJCFGOFMamilEVNtSxqqn2zLSLY2tFJ2+t7aXt9Hxt25N7bXt/Hqo07ae/Yyp6u/GMAlQhqJpUztaqMmqrc+5TKciZXlDK5opRJyfvkijImlZdSUVZCZVlJ8l5KZfK5rLSEslJRXlJCeZkoKymhvFSUluSmS0v05qsseZegVKJEcjeV2WFw6GecJBpqqmioqeK0ufmX2dfVy7bdnWzf08X23Z1s293Jzn3d7NrXw6793XTs72HXvm527e+m7fW97O/uZW9XL/u6etnb3UvvEbiXoLRElCi3PyUCkXsvkSB5TybfXAbeasv9W+TWe3vbWz8oUu715udkqf62g/30vO073jbjIMs
PaY8Pvo3xbuJUWng//NxZVJaNbnerQ98GNamiNNfnf9Tkw143Iujq7WN/Vx+dvb10dvfR2dNHV08fnT29dPb00dMbdPcl7719dPfmpnv7gt4IevqC3t4+egN6evvoC+iLoC+Z39cX9AX0RhCR22ZfMt2/LPBmW/DWvP4ndubWe2te/5y3puNtD/eM1P6lPx+4/weuk17vgOWH+g874pUKIyZSseOAxuAn0qFvY0pS0pVTCvjeAbNC8zAMZmYZ4tA3M8uQkTwYvUnSTyW9IOk5SZ9P2q+VtFHSiuR1UWqdayStkfSSpPNHYwfMzGzoRtKn3wP8WUQ8JWkqsFzSw8m86yPiq+mFJS0ElgAnAUcD/yHpOD8c3czsyBn2kX5EbI6Ip5LpDuAFYM4hVrkEuCsiOiNiHbAGOH242zczs8M3Kn36kuYBpwJPJE2flfSspFskTU/a5gAbUqu1cZAfCUlLJbVKam1vbx+NEs3MjFEIfUlTgHuAL0TELuAG4FhgMbAZ+Fr/onlWz3vRbkTcGBEtEdFSX18/0hLNzCwxotCXVE4u8L8bET8AiIgtEdEbEX3Ad3irC6cNaEqt3ghsGsn2zczs8Ax7wDXl7vu+DdgREV9Itc+OiM3J9BeBd0fEEkknAXeQ+xE4GlgGLBjsRK6kduCVYRUJdUAWHyjr/c4W73e2DHW/50bEAV0lI7l650zgY8BKSSuSti8Bl0laTK7rZj3wKYCIeE7S3cDz5K78uWooV+7kK3qoJLXmG2Wu2Hm/s8X7nS0j3e9hh35EPEr+fvoHDrHOdcB1w92mmZmNjO/INTPLkGIP/RsLXUCBeL+zxfudLSPa73H/5CwzMxs9xX6kb2ZmKQ59M7MMKcrQl3RBMpLnGklXF7qesZQMdbFV0qpU21GSHpa0OnmffqjvmIgOMcprUe+7pCpJT0p6Jtnv/5W0F/V+A0gqlfS0pB8mn4t+nwEkrZe0Mhm1uDVpG/a+F13oSyoF/hm4EFhI7r6BhYWtakzdClwwoO1qYFlELCB3E1wx/vD1j/J6InAGcFXy37nY970T+EBELCI31MkFks6g+Pcb4PPkBnbsl4V97nd2RCxOXZ8/7H0vutAnd8fvmoh4OSK6gLvIjfBZlCLiEWDHgOZLyN0tTfJ+6REt6gg4xCivRb3vkbM7+VievIIi329JjcB/Am5KNRf1Pg9i2PtejKE/5NE8i1hD/1AYyfvMAtczpgaM8lr0+1vcQyoAAAGkSURBVJ50c6wAtgIPR0QW9vvrwH8H+lJtxb7P/QJ4SNJySUuTtmHvezE+GH3Io3naxDdwlNfckFDFLRm+ZLGkWuBeSScXuqaxJOlDwNaIWC7p/YWupwDOjIhNkmYCD0t6cSRfVoxH+h7NE7ZImg25AfDIHREWnXyjvJKRfQeIiDeAn5E7p1PM+30mcLGk9eS6az8g6V8p7n1+U0RsSt63AveS68Ie9r4XY+j/GlggqVlSBblHNN5f4JqOtPuBK5LpK4D7CljLmEhGeb0ZeCEi/iE1q6j3XVJ9coSPpEnAucCLFPF+R8Q1EdEYEfPI/f/8k4j4KEW8z/0kVSePo0VSNfBBYBUj2PeivCM3eRj714FS4JZkoLeiJOlO4P3khlvdAvw18G/A3cAxwKvARyJi4MneCU3SWcAvgJW81c/7JXL9+kW775J+i9yJu1JyB213R8T/ljSDIt7vfkn3zp9HxIeysM+S5pM7uodcd/wdEXHdSPa9KEPfzMzyK8buHTMzOwiHvplZhjj0zcwyxKFvZpYhDn0zswxx6JuZZYhD38wsQ/4/XHpljCUeAlgAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "plt.plot(loss_history)\n", "plt.title('Loss per epoch');" @@ -351,22 +281,9 @@ }, { "cell_type": "code", - "execution_count": 281, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dfXxcVb3v8c9vJkkr8hQKSGlKU6TQUqClSWuxVAGL8mQRCoeC0lbl4SDeq0de3AMHBUU5elTOBY/1cBC0KAUOaKEVEHn0WoGQJrSVPkopqYlUKSFAe4EmmVnnjz2TTiYzyWSe9p6Z7/v16iuZPXsyK1G+e81vrb2WOecQEZHKEvK7ASIiUnwKfxGRCqTwFxGpQAp/EZEKpPAXEalACn8RkQqk8BcRqUAKfylpZtZmZt1mdmDS8TVm5sysPva4zsx+bWZvmNnbZvaSmS2KPVcfO3dX0r8LMmzD982s3czeMbNtZnZd0vO3m9lmM4vG3zPhuduS3nO3me0c5L3CZvYdM3vNzHaa2Woz2z+TdookUvhLOXgVuDD+wMyOBT6QdM4vgXZgHDAKWAD8Pemc/Z1zeyf8++8M3/9OYKJzbl/go8BFZnZuwvNrgS8BLya/0Dn3j4nvCdwLPDDIe30r9h4nAPsCFwPvZ9hOkT5VfjdAJA9+iRfm/xF7vBD4BfCdhHOmA//knPv/scer8/XmzrnNSYeiwBEJzy8GMLNBQ9rMPgjMA85K83wt8FVginNuW+zwuiybLRVOPX8pB03AvmY2yczCwAXA3SnOWWxm883ssOH8cDO7yMz+NMQ515jZLqAD+CBwz3DeI2YesAP4Q5rnjwV6gfPM7G9m9mczuzKL9xFR+EvZiPf+TwU2AX9Nev58YCXwDeDV2JjA9KRz3jCztxL+TQJwzt3jnDtusDd3zn0P2AeYFmvL21n8DguBX7j0C27VAfsBRwLjgfOAb5rZqVm8l1Q4hb+Ui18CFwGL8Eo+/Tjnupxz1zjnJgMfAtYAD5mZJZx2oHNu/4R/G4fTAOdZDbyHV5vPmJmNBT6equ0J3ot9vdE5955z7k/AfcAZw3kvEVD4S5mI1cBfxQvCZUOc+wbwQ+BQ4IACNKcK+PAwX7MAeM45t3WQc+KlJy3FKzlT+Es5+SJwSsKgbh8z+zczO8bMqsxsH+AKYItzrjOXNzSzkJldbma15pkBXAk8lXBOjZmNBAyoNrORZpb8394CYMlg7+WcewWvdHWdmY2IlaUuAB7O5XeQyqTwl7LhnHvFOdeS5um9gAeBt4CteFM+5yad81bSnPuvAZjZZ81s/SBvfQ7wCrATb6D5P9gz8wjgcbySzUeB22Pffyz+pJmdgFfPHzDF08x+a2b/knDowljbO4FHgG84555Kfp3IUEybuYiIVB71/EVEKpDCX0SkAin8RUQqkMJfRKQClczaPgceeKCrr6/3uxkiIiWltbX1DefcQcnHSyb86+vraWlJN4tPRERSMbNtqY6r7CMiUoEU/iIiFUjhLyJSgUqm5p9KT08PHR0dvP9+eW9kNHLkSOrq6qiurva7KSJSJko6/Ds6Othnn32or6+n/8q85cM5R2dnJx0dHYwfP97v5ohImSjpss/777/PqFGjyjb4AcyMUaNGlf2nGxEprpIOf6Csgz+uEn5HkVLTuq2Lxc9soXVbl99NyUpJl31ERPzQuq2Lz97RRHdvlJqqEEsvmUnDuFq/mzUsJd/zD5JvfvOb/PCHP0z7/EMPPcSGDRuK2CIRKYSmrZ1090aJOujpjdK0Nac9gXyh8C8ihb9IeZh5+ChqqkKEDaqrQsw8fJTfTRq2igv/fNfpbrrpJo466ijmzJnD5s2bAfjpT3/K9OnTmTJlCvPmzePdd9/lueeeY8WKFVx99dVMnTqVV155JeV5IhJ8DeNqWXrJTL72yaNKsuQDFRb+8TrdzY9v5rN3NOV8AWhtbeW+++5j9erVLFu2jFWrVgFw7rnnsmrVKtauXcukSZO48847+ehHP8rcuXP5wQ9+wJo1a/jwhz+c8jwRKQ0N42q58uQjSjL4ocIGfFPV6XL5H27lypWcc8457LXXXgDMnettCbtu3Tq+/vWv89Zbb7Fr1y4+9alPpXx9pueJiORbRfX8C1GnSzUNc9GiRfz4xz/mpZde4oYbbkg7Rz/T80RE8q2iwj/fdbqPfexjPPjgg7z33nvs3LmT3/zmNwDs3LmT0aNH09PTw9KlS/vO32effdi5c2ff43TniYgUWkWVfcC7AOSrRjdt2jQuuOACpk6dyrhx45g9ezYA3/72t/nIRz7CuHHjOPbYY/sCf/78+Vx66aX86Ec/4le/+lXa80RECs2cc363ISONjY0ueTOXjRs3MmnSJJ9aVFyV9LuKSP6YWatzrjH5eEWVfURExJOX8Dezn5nZ62a2LuHYAWb2hJm9HPtam/DctWa2xcw2m5mmuIiIFFm+ev5LgNOSjl0DPOWcmwA8FXuMmR0NzAcmx17zEzML56kdIiKSgbyEv3PuD8CbSYfPBu6KfX8X8JmE4/c553Y7514FtgAz8tEOERHJTCFr/h9yzm0HiH09OHZ8DNCecF5H7JiIiBSJHwO+qRanTznlyMwuM7MWM2vZsWNHgZslIlI5Chn+fzez0QCxr6/HjncAYxPOqwNeS/UDnHO3O+canXONBx10UAGbGhx77723300QkQpQyPBfASyMfb8QWJ5wfL6ZjTCz8cAEoLmA7fBdJBLxuwkiIv3ka6rnvcDzwFFm1mFmXwS+B5xqZi8Dp8Ye45xbD9wPbAAeA650zhUvHdubYeXN3tc8aGtrY+LEiSxcuJDjjjuO8847j3fffZf6+npuvPFGTjzxRB544AFeeeUVTjvtNBoaGpg9ezabNm0C4NVXX+WEE05g+vTpfOMb38hLm0REhpKX5R2ccxemeeoTac6/CbgpH+89LO3NcNdciHRDuAYWroCxuU802rx5M3feeSezZs3iC1/4Aj/5yU8AGDlyJH/84x8B+MQnPsFtt93GhAkTeOGFF/jSl77E008/zVe+8hWuuOIKFixYwOLFi3Nui4hIJirrDt+2lV7wu4j3tW1lXn7s2LFjmTVrFgCf+9zn+gL/ggsuAGDXrl0899xznH/++UydOpXLL7+c7du3A/Dss89y4
YXetfPiiy/OS3tERIZSWQu71c/2evzxnn/97Lz82ORlneOPP/jBDwIQjUbZf//9WbNmTUavF5HS07qti6atncw8fFRJbPBSWT3/sTO8Us8p1+Wt5APwl7/8heeffx6Ae++9lxNPPLHf8/vuuy/jx4/ngQceAMA5x9q1awGYNWsW9913H4CWdRYpUfneJbAYKiv8wQv82VflLfgBJk2axF133cVxxx3Hm2++yRVXXDHgnKVLl3LnnXcyZcoUJk+ezPLl3uSnW2+9lcWLFzN9+nTefvvtvLVJRIon1S6BeZHnCSqJKqvsUyChUIjbbrut37G2trZ+j8ePH89jjz024LXjx4/v+9QAcM011xSkjSJSOPFdAnt6o3nbJbBQE1TiFP4iIjmK7xLYtLWT2r1q+nr+OdX+U01QUfgHR319PevWrRv6RBEpKcMdwI2f89k7mujujVJTFcptu9gCTVCJK/nwd86V/WyZUtltTaRcxAdwhxviqWr/WYd/fIJK20ov+PPY64cSD/+RI0fS2dnJqFGjyvYC4Jyjs7OTkSNH+t0UkYqRbYjnvfY/dkbeQz+upMO/rq6Ojo4Oyn3Fz5EjR1JXV+d3M0QqRrYhnlj7D/p8/5LewF1EpFCCcNNWPtqQbgP3ku75i4gUSsO4Wl977tmOO2Sq8m7yEhEpAQW7cSxG4S8iEkDxcYewkb8bxxKo7CMikk/tzbD2HsBgyoVZz9Yp9OCxwl9EJB/iof/i3RDt8Y6tXgqLHs7pAlCocQeFv4hIrlqWwKNXQbS3//ECLMuQL6r5i4jkor05dfBDQZZlyBf1/EXEd0GYUz8siXV9HLjonucsDEedDnsfnFPNv9AU/iLiq0LPZ8+79maiPz8Ti3YDYKFqCFV5PX8LwRk3Q+Mif9uYAYW/iPgqr4uhFcFf1zzOIZEeQrHlxFy0F2v8POxXV5AF2ApF4S8ivhruOjp+l4iejxzNpwkTcl6NPxKqpirA5Z10FP4i4qvhzGcvaomovTnlcsrjjz+Zha3X82n3/wiFjOPPuIKJJRb8oPAXkQAYaj57vLf/2lvvFadENMgWig3jarn6kgU0bT2TmYePYmKAS1SDUfiLSKAl9varQkZVOEQkkse9clMZYgtFvxd9yweFv4gU3XDq9okDwpGo44IZYxmz/wcKW/Mf5haKfo9DZEPhLyJFNdy6ffKA8LxpdYUP2GFsoVhyU1VjFP4iUlTDndrp2+5YGW6hWGpTVeMU/iJSVNlskRjkGnve9+0tEm3jKCJFl1wjz7Rmnu48v2vufr//YLSNo4gERmJPPtOaebrzglBzD/Ink3S0qqeI+CrT7QrTnVfo7Q7LVcF7/mbWBuwEIkCvc67RzA4A/huoB9qAf3DOdRW6LSISPJnWzNOdV6o1d78VvOYfC/9G59wbCce+D7zpnPuemV0D1Drn/nmwn6Oav0j5KvWaf5AFreZ/NnBS7Pu7gN8Dg4a/iJSvTGvm6c5rCL1MQ9VKCM0GSm+dHT8UI/wd8LiZOeC/nHO3Ax9yzm0HcM5tN7ODU73QzC4DLgM47LDDitBUESkp8U1VVt/jraeftA6PpFeM8J/lnHstFvBPmNmmTF8Yu1DcDl7Zp1ANFJES1LdvbgSvj0lg9swthTJUwcPfOfda7OvrZvYg3meyv5vZ6FivfzTweqHbISJlIN7T37UDNv/WW3itjwViz9wgTD3NREHD38w+CISccztj338SuBFYASwEvhf7uryQ7RCREhcP/RfvhmjPwOctDA0LA7Fnbqks91Donv+HgAfNLP5e9zjnHjOzVcD9ZvZF4C/A+QVuh4hkIJDlilTlnT4GoXCg9s0tlamnWt5BRIDilCuGfXFpb4afn+4N5iYLVcO0z8GUi3zv7ScL0kU0aFM9RSQAEkOq0OWKrC4ubSvBRfc8tjAcdTrsfXAgSjzplMJyDwp/kQqVHMbXnzW5oOWKrC4u9bMhPAIiu8FCgSrvlDqFv0iFSg7jrne7C7pu/qC18CdugI0rYNJcOPVbe44PY1MVGR6Fv0iFShXGhSxXpN2U5YkbcM/e4n3/7C0YDLwAKPTzTgO+IhUsXvOv3auGrne789bjH3LAs725rzf//v2XMuKdNszAOdi9bz0jr1qbcxvEowFfkQII0qyObMTbnM9ZPoMO7KZYjuHV2pOY+E4b8X7ouv0+zoCkkrxT+ItkqVTu5BzKshc72N0TxZGfWT5pB3bbm+GuudD7PonLMex72DH89G9VnGrNPOFm0JBY8pGCUfiLZKlU7uQcTOu2Lh5oae+7dSoczn2WT9qB3baV3to7fe/mLccwZuonaTjuSh4t4U9QpUjhL5KlUrmTczBNWzvpjXphbMCkQ/bJ+Wc2jKvlobnVdG14mtqjT2FiPMzrZ3tr70S6vbtyj/9c31z9BlDoF5kGfEVyUOo1/8TSVdR5F4AR1TmWsOLlnUj3wCWWEwZ6NYOnONIN+GoPX5EcNIyr5cqTjyjJ4Ic90y9nHXEgBv3q/llpb4bffxd6d3srbsaXWI4bOwNmX6XgDwCFv0iFaxhXy1fnHMmI6hBhI/sSVrzHv/X3QBQIBWKJZUlNNX+RChcvXS06oZ7129/h9GNGZ77oWmIJJz6g66LeUgyHnwQnXatefkAp/EUqWKqa/6q2NznqkH3SXwDam2HtvbD6bm+Z5XhdP3FAN1yj4A84hb9IBUucrgoMPde/b239hCWW43X92VcVdR2eUh9s95vCX6SE5RqA8emq8QtAiEFq/u3NA4M/aevE1ugEmnoPYGZ0FA3Z/UoZKZcb7Pyk8BcpUfkIwMTF1oZc3yfV2voNC/o2UylmIJfDDXZ+U/iLlKjEAOzuiXLLk3/mq3OOzOoCkNFrhlhbv1CBnOrTTTncYOc3hb9Iiardq4aQGc45osCzW95gVdubhetxx9bW/+uax3k+cjTjDzq5X2mnEIGc7tNE2uWhJWMKf5ES1LqtixsfXk/UOSx2d1bOPe4nboCX7ofa8TDnmykHbFujE/hss9fDr2lt6nehKUQgD/ZpohS2Sgwyhb9IARR6JkpiKIYMwiHvE0BWPe6WJfD7f4Vdf/cev/Ma/Ow0+MJjAy4AQ5V28h3IKu8UjsJfJM/SlSpyvSAkvj45FK8/a3J2m7H8+lKvt5/MRbwB3qTwL3YYD/VpQtM9s6fwF8mzVL1jyG3DlFQXlJxKLO3N8OytsOnh1M9bOO2yDPOm1eFiX4sRuOk+TWi6Z24U/iJ5lqp3PJyZMKl6s6leP9wF5Vq3dfHq6mf4+HtPcNCWX3uzdlI55Fg4898H9PqTw3betLqM37sQNN0zNwp/kTxLV6rIpFySrjeba7mldVsXD93xHa4P/ZwQEZx5Szn0kyb044IWthoPyI3CX6QAkksVyRcEgMXPbBlQskkXsFnPpGlZAhuX43aP5frQ3VQR8TZKB8AgVAWjj4PjF/Sbs59K0MJW0z1zo/AXKZJ4iA9Wqx4sYIc1kyZW03exmn4DXuCbgXPgLIw1LmTTh87kqV31zDxo6OUYghi2mu6ZPYW/SJGlGxCOh2omnxAGFVtX
3/W+B25P4GMhogChEKEzb6b1oLNjF6HNGQ+YKmzLh8JfpMiSe/e1e9UM+CRw5clHZD+bJbaufnxnrvhOrS/WXUzjUeP6VtxsemZLoGr4UlwKf5EcZDPPPLl8kq7On9EAa3szrL0Hdu2AvQ/2NkSPravvIt30OFgfHccyTuEzp34dEl4ftBq+FJfCXypetjcK5TLPPLl8kiqEhwznliXwyD/1X2lz9VJY9DAsXIG1rWTryCk8t6uez6T43YJYw5fiUfhLRcslwIeq3Wf7SSCjtXLam+GRr/UPfui/scrYGUwEJg7x3gr9yuRb+JvZacCtQBi4wzn3Pb/aIpUrl7nrmdTus/0kMNTx5LX1Y2V9bIgN07UcgsT5Ev5mFgYWA6cCHcAqM1vhnNvgR3ukcuVS9860dp+z+EbpHxgF73V64d5X198NDiLAM66RsWdey8Q0N2kN9SlHF4bK4lfPfwawxTm3FcDM7gPOBhT+UlS51r0zqd3npG/P3Ahe/z4EVSO8vXIXPcy6R2/jT+1v8evIbNZyJF/bVZ+2zDPYxUnr5FQev8J/DNCe8LgD+EjySWZ2GXAZwGGHHVaclknFyVfdO+8DqCn3zI32q+t3nzaBb9/RRA9DX3AG+5QTtKUbpPD8Cv8By4qwp2y554BztwO3AzQ2Ng54XiRo8nIhiZd53m6HaNKALqF+G6YP54Iz2Lma9ll5/Ar/DmBswuM64DWf2iJScEPW0xPr+o9d4/XuQ1UQroJIL4RCcMKXYeS+fTdpxQ3ngjPYwLKmfVYWv8J/FTDBzMYDfwXmAxf51BaRghqynt5X1496Ie+i3r8o0LAQ9qsbEPj5aFOqqaUK/crhS/g753rN7MvA7/Cmev7MObfej7aIFFrT1k5290RxQHdPUj09ua4fdRAKA+aVd6ZcmNfQBw3uise3ef7OuUeBR/16fykfQZ+iWLtXTd+AVjT2eE9dv6P/jVqhMJxx854pnXkOftDgrnh0h6+UtFLoxXa9203I4B/sKU4PN+NWHQdvLYvV9cMQqoZoD1jIC/6kdfXzfXHT4K6Awl9KXCn0YmcePoqfh7/Lx8IveQd2vIQzw3AQhR1HXsCW7lpqjz6FiY1z+r0224vbYBeMfA3uBv0TlwxO4S8lrRR6sQ1/vgVX9VK/tfUdhlmIaKia/7VhIs29R1CzpYelB3dltLPXYDK5YOQ6uFsKn7hkcAp/KWmBnaL4xA2wcQVMmut9JWFTFeDvx17G6IMPZlnneJpfGJE23LO5uBXj01ApfOKSwSn8peQVY4pixiWO+Gqbf4uVeJ69BQ5t6NtYBYO3D/0Yo+f9GwDjt3VR09qUNtyzubgV49NQKXziksGZc6Vx42xjY6NraWnxuxlSgTIucbQ3w5IzIdLt7ZdLbN/cAw7f8wlg0lw49VsDfn6+P7nEf2btXjV0vdtdkE9FqvmXBjNrdc41Jh9Xz19kCBmXONpWQqQHiAV/rF+1fcwnGX3qtwaEflwhPrnEf14h6/K6Kay0hfxugEjQxUscYWNgiaNlCfzyHO9r/WwIV/ftm+uAByOzWHbApb60O91mMyKgnr9IWolljevPmsxv123n9GNG7+nttiyBh7/iff/K03DWrbDoEd744895cuPr/Lr3RNaFJ7LUp3q46vIyGIW/SAr3vPAXrl++jkjUETKwkDHFbeZv2zaxKXQ+E6fPgY3L+79o43JoXMRBF87gyG1dnLy1k2t9rIcHdiaUBILCXyRJ67Yurl++jt6oV7SPOPg/LOUfqx/BcEQf/RUc8luYdLbX44+bdHbft0GphwelHRI8Cn+RBK3burjlyT/3Bf80+zOXhR/mU2FvppkZhFwvPHsrzF/qvWjjci/4k5ZlKFZ71bOXbCj8RWISp3TGQ39O+EXCeAuvWWwLIgPYud170LjIl9AH3WUruVH4SyAEoQfbtLWTyZFNXBp+mDnhVsLm9f7j8/UT74ix4xf40cR+dJet5ELhL74LSg/2E3u3cUn1d6jBW1u/316jFqIjcgDvuhHczel85qCzaSh6C/vTbB7JhcJffOd7D7ZlCWxczsTqvXAWGbjBtIV55ohruGTdZKIOwgaHDNHGYnyS0WweyYXCX3znSw82vpnK++946+/EmIXBRWIPQnDUGTDrK+wXnUDNpvRr8CRKnCY6orqwn2Q0m0eypfAX3xW9B9veDHfN9TZTIWltq0OnwujjAOu3hWIDZNTG5GmiA7ZtFAkIhb8EQmIPtqAlk/Zm+P13IbI7tn1i0gonxy9IO3snk15209ZOogmLJYZCplq8BJLCXwommxAvyOBvezOsvRd2vQ4vPw6RXiDqlXXCI+Ajl8Pf/pSXufrxElZ3b5SQGTeefYx6/RJICn8piGxDPO+Dv+3NsOQsr6cPCUsth7DDT4KTrs3rJukahJVSofCXgsg2xPM++Nu2Mlbbj3EQweimim0Tr2RiHoM/ToOwUgoU/lIQ2YZ43nvO9bMhXAOR3TighzD3R07ioehsTt5Vz8TcfrpIydJOXlIwRb9rNz59s352/1JOezOv/3EJW3fs4t9fn0ZrZALVg5SignC3sUi+aCcvKbqilT/iA7qr74ZoxOvpL1zRdwFojU7gsxs+TXdvlKpwiAtm1DFvWl3a4E83VqGLgpQThb+Utvic/d736ZuzH+n2PgHEwj9x/CESiTJm/w+kDe90YxWt27q48Kd7bvK691ItoialTds4Sk5at3Wx+JkttG7rKs4btjfDypu9r5AwoBsvX5rX86+f3feSQbdhTJLu3GUvdtDdG8UB3b1Rlr3YUZjfT6RI1POXrOU6J3/YZZTEO3PjpZ2+Ad1uCFXB8RfBlIv61fyHGkRObkeqc5NHxkpjpEwkPYW/ZC2XOflZXTjivXwX2VPamX2VdxFINdCbIN34Q7p2JJ87b1odv2pppyfiqA4b86bVZfR7igSVwl+ylsuc/KwuHIm9/MTSztgZWd+olWk7GsbVcu9lJ2jAV8qGwl+ylsuc/EEvHOmmbI6dkVEvfziGcwHTzVtSTgo2z9/MvglcCuyIHfoX59yjseeuBb4IRID/7Zz73VA/T/P8y0/Kmn+qun6OIT/U2IKmcEo582ue//91zv0wqSFHA/OBycChwJNmdqRz8UXUpVL060nHe/tvdwys6yeE/3CDOl1NP/nnKPSl0vhR9jkbuM85txt41cy2ADOA531oiwRBYm8/FPZm7UQZMGUzm0HiVDV9oOjbRurThQRNocP/y2a2AGgBrnLOdQFjgKaEczpix6TS9PX22/f09qNAwwLYb+yAun42g8SpavrF3jYyKHsUiyTKKfzN7EngkBRPXQf8J/BtvCnR3wZuBr4AA7dIJc20aTO7DLgM4LDDDsulqRI0/Xr7VV6PP97bT5qnH5fN7KJ0g9LF3DbS9z2KRVLIKfydc3MyOc/Mfgo8HHvYAYxNeLoOeC3Nz78duB28Ad/sWyqBkba3vxD2qxtyrn42s4uSa/rFXnPflz2KRYZQyNk+o51z22Pf/xPwEefcfDObDNyDV+c/FHgKmDDUgK9m+5SweOB/YBQ8ds2
e3j4u5UJs5Ug1f/GLH7N9vm9mU/FKOm3A5QDOufVmdj+wAegFrtRMnzKVvNqmmbdvrotm3NsvF5pRJEFTsPB3zl08yHM3ATcV6r0lAFKttulCEArRt/jalAvLPvRFgkp3+EphpFpts2oEnPY9eK+zInr7IkGm8JfCyGC1TRHxj8JfclPEdXiGokFVkcwp/MuEL8E31Do8Oay2OVy6kUpkeBT+ZcC34Eu1vv4QYV+oi5RupBIZHoV/GfAt+NKtr59GIS9SupFKZHgU/mWg4MEXn6+P6z9oO8y6fiEvUsW+a1ek1Cn8y0Chgq91Wxevrn6Gc/90ORbtBsC9uJTQ5x/pfwHIsK5f6IuUbqQSyZzCv0zkO/gef2wFO55dwtG0Qagbiy3HF4308Nc1jzMmi4Fc9c5FgkPhLwNsWvUkH3/+89SEevuOxZeA6iXM85GjOS/Ln63euUgwKPxlgK4NT3MkvX29/YiDl9yHWR+t5zf2ca4+/mR/GygiOVP4ywC1R59Cz9b/osZ5PX8XqmHkGd/nrV31XK1yjUhZUPjLABOnz2ET/03Pi/dwyL4jOOjEzzNx7AwmZvnzdOetSPAo/CWlidPnwPSM9uoZlO68FQmmkN8NkPKWbgN1EfGXwl8KKj63P2zozluRAFHZRwpKc/tFgknhLwWnuf0iwaOyTzlqb4aVN3tfRURSUM+/XMQ3VfnAKHjsmvRr7GdBUzVFyo/CvxwkbqpiBtEoEM14jf3BaKqmSHlS2accJG6q4qJELUSUMNFQ9ZBr7A9FUzVFypN6/uUgYVOVaKiaG7o/x75uJ62RyVwdnUBDDj9amx11xVcAAAYnSURBVKSIlCeFf4BkXVtP2FRlWed4lr4wgqiDsJHzhimaqilSnhT+AZFYW68KhzivoY550+oAMgve2KYq47d1UdPalNeeuqZqipQfhX9AJNbWu3uj3PvCX/hVSztT7WWms54fPD2Zqy9ZMGQIq6cuIplQ+AdEvLa+uyeKAxxwTHQzd9X8K9X00sODPLJ6LA3jzh3yZ6mnLiJDUfgHRLzHvuzFDl5ufYrpbGCMvUE1vVRZFFwvJ4Q3AEOHv4jIUBT+AdIQepmG6nuJ1twN0V4IVQFVRKMRQlXVjJn6Sb+bKCJlQuEfFPEbtXrfJ0Rsw1wXgYYFsN9YbzpnjnfqiojEKfyDIn6jVjz4MW/u/pSLFPoikncK/6BIuFGLUBUcf5GCX0QKJqflHczsfDNbb2ZRM2tMeu5aM9tiZpvN7FMJxxvM7KXYcz8yM8ulDWUjfqPWKdfBoofhrFsU/CJSMLn2/NfhTT/5r8SDZnY0MB+YDBwKPGlmRzrnIsB/ApcBTcCjwGnAb3NsR3mI3aglIlJoOfX8nXMbnXObUzx1NnCfc263c+5VYAsww8xGA/s65553zjngF8BncmmDiIgMX6FW9RwDtCc87ogdGxP7Pvl4SmZ2mZm1mFnLjh07CtJQEZFKNGTZx8yeBA5J8dR1zrnl6V6W4pgb5HhKzrnbgdsBGhsb054nIiLDM2T4O+fmZPFzO4CxCY/rgNdix+tSHBcRkSIqVNlnBTDfzEaY2XhgAtDsnNsO7DSzmbFZPguAdJ8eRESkQHKd6nmOmXUAJwCPmNnvAJxz64H7gQ3AY8CVsZk+AFcAd+ANAr+CZvqIiBSdeZNugq+xsdG1tLT43QwRkZJiZq3Oucbk49rDV0SkAin8RUQqkMJfRKQCKfxFRCqQwl9EpAIp/EVEKpDCX0SkAin8RUQqkMJfRKQCKfxFRCqQwl9EpAKVf/i3N8PKm72vIiIC5L6Hb7C1N8NdcyHSDeEab4N07ZErIlLmPf+2lV7wu4j3tW2l3y0SEQmE8u75188mGqqGCBCqJlQ/2+8WiYgEQlmHf2t0Aj/o/hca3HpaI5O5OjqBBr8bJSISAGUd/k1bO2nuPYImdwRh8x43jKv1u1kiIr4r65r/zMNHUVMVImxQXRVi5uGj/G6SiEgglHXPv2FcLUsvmUnT1k5mHj5KvX4RkZiyDn/wLgAKfRGR/sq67CMiIqkp/EVEKpDCX0SkAin8RUQqkMJfRKQCKfxFRCqQOef8bkNGzGwHsM3vdhTYgcAbfjciQPT36E9/j/709+gv3d9jnHPuoOSDJRP+lcDMWpxzjX63Iyj09+hPf4/+9Pfob7h/D5V9REQqkMJfRKQCKfyD5Xa/GxAw+nv0p79Hf/p79Desv4dq/iIiFUg9fxGRCqTwFxGpQAr/gDGzH5jZJjP7k5k9aGb7+90mP5nZ+Wa23syiZlax0/rM7DQz22xmW8zsGr/b4ycz+5mZvW5m6/xuSxCY2Vgze8bMNsb+W/lKJq9T+AfPE8AxzrnjgD8D1/rcHr+tA84F/uB3Q/xiZmFgMXA6cDRwoZkd7W+rfLUEOM3vRgRIL3CVc24SMBO4MpP/fyj8A8Y597hzrjf2sAmo87M9fnPObXTObfa7HT6bAWxxzm11znUD9wFn+9wm3zjn/gC86Xc7gsI5t90592Ls+53ARmDMUK9T+AfbF4Df+t0I8d0YoD3hcQcZ/MctlcfM6oHjgReGOrfst3EMIjN7EjgkxVPXOeeWx865Du/j3NJits0Pmfw9KpylOKY52tKPme0N/Br4qnPunaHOV/j7wDk3Z7DnzWwhcBbwCVcBN2IM9fcQOoCxCY/rgNd8aosEkJlV4wX/Uufcskxeo7JPwJjZacA/A3Odc+/63R4JhFXABDMbb2Y1wHxghc9tkoAwMwPuBDY65/4909cp/IPnx8A+wBNmtsbMbvO7QX4ys3PMrAM4AXjEzH7nd5uKLTYB4MvA7/AG8+53zq33t1X+MbN7geeBo8ysw8y+6HebfDYLuBg4JZYZa8zsjKFepOUdREQqkHr+IiIVSOEvIlKBFP4iIhVI4S8iUoEU/iIiFUjhLyJSgRT+IiIV6H8Ak8En/idUJxAAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "# Vis\n", "fig, ax = plt.subplots()\n", @@ -385,7 +302,7 @@ }, { "cell_type": "code", - "execution_count": 282, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -395,7 +312,7 @@ }, { "cell_type": "code", - "execution_count": 290, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -410,7 +327,7 @@ }, { "cell_type": "code", - "execution_count": 284, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -425,7 +342,7 @@ }, { "cell_type": "code", - "execution_count": 285, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -448,7 +365,7 @@ }, { "cell_type": "code", - "execution_count": 286, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -461,7 +378,7 @@ }, { "cell_type": "code", - "execution_count": 287, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -471,22 +388,9 @@ }, { "cell_type": "code", - "execution_count": 288, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5xV5X3v8c93LsxwmwFhGGAAuQQvgIoRjT1eYjRGYtNg2pME28RLzCFa2yZNetGkl7Sntj1trK1tY2qiVV/xElu12h7NkUMuxhOjDoqioAKCMIDMIMJwHZiZ3/ljrYEt7mGGubBn9vq+X9mvvfaz1trrWWC+e/GsZz2PIgIzM8uGkkJXwMzMjh2HvplZhjj0zcwyxKFvZpYhDn0zswxx6JuZZYhD36wISfqJpC8Wuh428Dj0bcCQtE7SRwtdD7Ni5tA360OSygpdB7MjcejboCDpf0haLWmbpMckTUzLJekWSY2Sdkh6WdKcdN2lklZI2ilpo6Tf6+S7r5L0/yT9Y/odr0m6KGd9taQ7JG1Ov+cvJJUetu8tkrYB38zz/SWSbpC0RtI7kh6UdFy6bqqkkLRI0qb0GF/L2bdC0t+n6zalyxU56xdIWiapOf3++TmHPj6t205JT0oa26u/BCsKDn0b8CRdCPwV8BlgAvAW8EC6+mPA+cAJwCjgs8A76bo7gC9FxEhgDvCjIxzmQ8CbwFjgT4GHO4IZuBtoBT4AnJ4e84t59h0H3JTnu38HuAz4MDAReBf458O2+QgwM/3uG3Kaub4BnA3MBU4DzgL+KP1zOQu4B/j99NzPB9blfOevA1en9RoC5P3Rs4yJCL/8GhAvksD6aJ7yO4C/yfk8AjgATAUuBN4gCcaSw/ZbD3wJqOriuFcBmwDllD0HfB6oBVqAoTnrLgd+nLPv+i6+fyVwUc7nCWn9y9JzCOCknPV/A9yRLq8BLs1ZdwmwLl3+F+CWTo75E+CPcj7/JvDDQv8d+1X4l6/0bTCYSHJ1D0BE7CK5mq+LiB8B/0Ry5bxF0u2SqtJNfw24FHhL0k8l/dIRjrExInJHH3wrPe7xQDmwWdJ2SdtJwnZczrYbuqj/8cAjOfuvBNpIflDyfUfHsd937oetm0zyo9CZt3OW95D8WFrGOfRtMNhEEpwASBoOjAE2AkTErRFxBjCbpJnn99Py5yNiAUlA/wfw4BGOUSdJOZ+npMfdQHKlPzYiRqWvqoiYnbNtV0PVbgA+nrP/qIiojIiNOdtMznPs9537Yes2ADO6OLbZezj0baApl1SZ8yoD7gOuljQ3vYn5l8CzEbFO0pmSPiSpHNgN7APaJA2R9BuSqiPiANBMcnXdmXHA70gql/Rp4GTg8YjYDDwJ3CypKr0pO0PSh4/inL4D3CTpeABJNZIWHLbNH0saJmk2STv8D9Ly+4E/SvcZC/wJ8P103R3pn8tFab3qJJ10FPWyDHLo20DzOLA35/XNiFgC/DHwELCZ5Op2Ybp9FfBdkpujb5E0+3wrXfd5YJ2kZuBa4HNHOO6zJDdSt5LcjP3vEdFxQ/gKkhuhK9Lj/DtJu3x3/QPwGPCkpJ3AL0hu/ub6KbAaWAJ8KyKeTMv/AqgHXgaWAy+kZUTEcyQ/ELcAO9LvOB6zI9B7mzHNskfSVcAXI+LcAhx7KrAWKI+I1mN9fMseX+mbmWWIQ9/MLEPcvGNmliG+0jczy5ABPzjU2LFjY+rUqYWuhpnZoLJ06dKtEVFzeHmXoS9pMsn4HuOBduD2iPgHSX8L/Aqwn+SpwKsjYnvaG2El8Hr6Fb+IiGvT7zoDuAsYStI178vRRfvS1KlTqa+v7845mplZStJb+cq707zTCnwtIk4mGd/kekmzgMXAnIg4lWTskxtz9lkTEXPT17U55bcBi0j6Q88EckcENDOzftZl6EfE5oh4IV3eSXIVXxcRT+b0K/4FMOlI3yNpAsnAV8+kV/f3kIw8aGZmx8hR3chNm25OJ3l6MdcXgCdyPk+T9GI6yNV5aVkd0JCzTUNalu84iyTVS6pvamo6miqamdkRdDv0JY0geQz+KxHRnFP+DZImoHvTos3AlIg4HfgqcF866qF4v7zt+RFxe0TMi4h5NTXvuw9hZmY91K3eO+lgVg8B90bEwznlVwKfIBkrPAAiooVkVEIiYqmkNSQjHzbw3iagSRwaLdDMzI6BLq/00+Fm7wBWRsTf5ZTPB/4Q+GRE7Mkpr8mZSm46yQ3bN9PRCndKOjv9ziuAR/v0bMzM7Ii6c6V/DslohcslLUvLvg7cClQAi9NhyDu6Zp4P/LmkVpKhbK+NiG3pftdxqMvmE7z3PoC
ZmfWzLkM/Ip4mf3v8451s/xBJU1C+dfUkc5X2u7t/vo7jhg/hV06b2PXGZmYZUbTDMDzw/AYeXbax6w3NzDKkaEO/tqqCt5v3FboaZmYDStGG/viqSt7e0VLoapiZDShFG/q1VZW8s7uFA23tha6KmdmAUdShHwFNO321b2bWoWhDf3x1BYDb9c3MchRt6I8bWQlAo0PfzOygog398dVJ6L+9w6FvZtahaEP/uGFDKC8VW9ymb2Z2UNGGfkmJGDeyki2+0jczO6hoQx/8gJaZ2eGKOvTHV1eyxaFvZnZQUYf+uJGVbGl2m76ZWYeiDv3x1ZXsamllV0tr1xubmWVAUYd+bVXygJabeMzMEkUe+klffffgMTNLFHXoj+8I/Z0OfTMz6N4cuZMl/VjSSkmvSvpyWn6cpMWSVqXvo3P2uVHSakmvS7okp/wMScvTdbemc+X2m44rfQ+xbGaW6M6VfivwtYg4GTgbuF7SLOAGYElEzASWpJ9J1y0EZgPzgW93TJQO3AYsIpksfWa6vt8MryhjZEWZ2/TNzFJdhn5EbI6IF9LlncBKoA5YANydbnY3cFm6vAB4ICJaImItsBo4S9IEoCoinomIAO7J2affjKuqcOibmaWOqk1f0lTgdOBZoDYiNkPywwCMSzerAzbk7NaQltWly4eX96vx1ZV+KtfMLNXt0Jc0AngI+EpENB9p0zxlcYTyfMdaJKleUn1TU1N3q5hXbVUljX5Ay8wM6GboSyonCfx7I+LhtHhL2mRD+t6YljcAk3N2nwRsSssn5Sl/n4i4PSLmRcS8mpqa7p5LXrVVyVAM7e15f1/MzDKlO713BNwBrIyIv8tZ9RhwZbp8JfBoTvlCSRWSppHcsH0ubQLaKens9DuvyNmn34yvqqS1Pdi2Z39/H8rMbMAr68Y25wCfB5ZLWpaWfR34a+BBSdcA64FPA0TEq5IeBFaQ9Py5PiLa0v2uA+4ChgJPpK9+1fFU7ts79jF2REV/H87MbEDrMvQj4mnyt8cDXNTJPjcBN+UprwfmHE0Fe+vgU7nN+5hTV30sD21mNuAU9RO5cGjaRI+2aWaWgdAfO6ICCXfbNDMjA6FfXlrC2BEVHnTNzIwMhD4kN3M96JqZWUZCf3xVJW/7St/MLBuhX1tVSeNO38g1M8tM6G/bvZ+W1rauNzYzK2KZCP2OyVQ8Bo+ZZV0mQr+2+tADWmZmWZaN0O8YisGhb2YZl4nQPzhXrpt3zCzjMhH61UPLGVJW4uYdM8u8TIS+JPfVNzMjI6EPSROPr/TNLOsyE/qeIN3MLEOhn1zptxDhaRPNLLsyE/q1VZXsPdBG877WQlfFzKxgshP6fkDLzKxbE6PfKalR0is5ZT+QtCx9reuYO1fSVEl7c9Z9J2efMyQtl7Ra0q3p5OjHzPgqh76ZWXcmRr8L+Cfgno6CiPhsx7Kkm4EdOduviYi5eb7nNmAR8AvgcWA+x2Bi9A65E6SbmWVVl1f6EfEUsC3fuvRq/TPA/Uf6DkkTgKqIeCaSO6n3AJcdfXV7rmOCdA+xbGZZ1ts2/fOALRGxKqdsmqQXJf1U0nlpWR3QkLNNQ1qWl6RFkuol1Tc1NfWyionK8lKqh5b7St/MMq23oX85773K3wxMiYjTga8C90mqAvK133fadzIibo+IeRExr6amppdVPGR8VaUHXTOzTOtOm35eksqAXwXO6CiLiBagJV1eKmkNcALJlf2knN0nAZt6euyeqq2upNGhb2YZ1psr/Y8Cr0XEwWYbSTWSStPl6cBM4M2I2AzslHR2eh/gCuDRXhy7R2pHVvhK38wyrTtdNu8HngFOlNQg6Zp01ULefwP3fOBlSS8B/w5cGxEdN4GvA74HrAbWcAx77nQYX11J084WWtvaj/WhzcwGhC6bdyLi8k7Kr8pT9hDwUCfb1wNzjrJ+faq2qpL2gHd27z/Ym8fMLEsy80QuHOq26R48ZpZVmQp9P5VrZlmXqdDveCrXoW9mWZWp0B8zooLSErkHj5llVqZCv7REjBtZ4QnSzSyzMhX6AOM8baKZZVjmQn98VYV775hZZmUu9OtGDaPh3b20t3vaRDPLnsyF/rSa4ew90MaWnb7aN7PsyVzozxg7HIC1TbsLXBMzs2Mvc6E/rSYJ/Te3OvTNLHsyF/q1IysZWl7Km77SN7MMylzol5SIaWOHs3brrkJXxczsmMtc6EPSxLPWzTtmlkGZDP3pY4ez4d297G/1uPpmli3ZDP2a4bS1B+u37Sl0VczMjqlMhv60sSMA3MRjZpnTnekS75TUKOmVnLJvStooaVn6ujRn3Y2SVkt6XdIlOeVnSFqerrs1nSu3IKaNSbttNvlmrpllS3eu9O8C5ucpvyUi5qavxwEkzSKZO3d2us+3OyZKB24DFpFMlj6zk+88JqqHlTNm+BBf6ZtZ5nQZ+hHxFLCtq+1SC4AHIqIlItaSTIJ+lqQJQFVEPBMRAdwDXNbTSveF6TXD/YCWmWVOb9r0f0vSy2nzz+i0rA7YkLNNQ1pWly4fXp6XpEWS6iXVNzU19aKKnZs2drgf0DKzzOlp6N8GzADmApuBm9PyfO30cYTyvCLi9oiYFxHzampqeljFI5s2dgRbd7XQvO9Av3y/mdlA1KPQj4gtEdEWEe3Ad4Gz0lUNwOScTScBm9LySXnKC2Z6OgbPOjfxmFmG9Cj00zb6Dp8COnr2PAYslFQhaRrJDdvnImIzsFPS2WmvnSuAR3tR716bPrajB49D38yyo6yrDSTdD1wAjJXUAPwpcIGkuSRNNOuALwFExKuSHgRWAK3A9RHRln7VdSQ9gYYCT6SvgpkyZhgl8mibZpYtXYZ+RFyep/iOI2x/E3BTnvJ6YM5R1a4fVZSVMmn0MHfbNLNMyeQTuR2SHjx+QMvMsiPzob92626SRwfMzIpfpkN/Rs1w9uxvo3FnS6GrYmZ2TGQ69DsGXlvjJh4zy4hsh37aV983c80sKzId+hOqKqksL2Gt++qbWUZkOvRLSsTUMZ460cyyI9OhDx5t08yyxaE/dgTrt+3hQJvnyzWz4pf50J82Npkvd4PnyzWzDHDo13jgNTPLjsyHfsdom76Za2ZZkPnQHzVsCMcNH+KbuWaWCZkPffDAa2aWHQ59Dg28ZmZW7Bz6JH31G3e2sKultdBVMTPrVw59cm7mugePmRW5LkNf0p2SGiW9klP2t5Jek/SypEckjUrLp0raK2lZ+vpOzj5nSFouabWkW9O5cgeEjtE239zqdn0zK27dudK/C5h/WNliYE5EnAq8AdyYs25NRMxNX9fmlN8GLCKZLH1mnu8smOPHDENyt00zK35dhn5EPAVsO6zsyYjoaAD/BTDpSN8haQJQFRHPRDJN1T3AZT2rct+rLC+lbtRQP6BlZkWvL9r0vwA8kfN5mqQXJf1U0nlpWR3QkLNNQ1qWl6RFkuol1Tc1NfVBFbvmHjxmlgW9Cn1J3wBagXvTos3AlIg4HfgqcJ+kKiBf+32nE9NGxO0RMS8i5tXU1PSmit02o2aE58s1s6LX49CXdCXwCeA30iYbIq
IlIt5Jl5cCa4ATSK7sc5uAJgGbenrs/jC9Zji7Wlp5u3lfoatiZtZvehT6kuYDfwh8MiL25JTXSCpNl6eT3LB9MyI2AzslnZ322rkCeLTXte9Dc+qqAXhpw/YC18TMrP90p8vm/cAzwImSGiRdA/wTMBJYfFjXzPOBlyW9BPw7cG1EdNwEvg74HrCa5F8AufcBCm72xCqGlJbwokPfzIpYWVcbRMTleYrv6GTbh4CHOllXD8w5qtodQxVlpZw8sYpl6x36Zla8/ERujtMnj2L5xh20ehYtMytSDv0ccyePYs/+Nt7Y4idzzaw4OfRzzJ08CoBlbtc3syLl0M9x/JhhjB5WzrIN7xa6KmZm/cKhn0MSp00e5St9MytaDv3DzJ08ilWNu9i570Chq2Jm1ucc+oc5fcpoImB5w45CV8XMrM859A8zd1JyM9cPaZlZMXLoH6Z6WDnTxw53u76ZFSWHfh5z05u5HnHTzIqNQz+PuVNG0bSzhU07POKmmRUXh34eBx/S8jg8ZlZkHPp5nDS+iiFlJby43g9pmVlxcejnMaSshDkTq3wz18yKjkO/E3Mnj2b5xh0c8IibZlZEHPqdOH3KKFpa23n97Z2FroqZWZ9x6Hei42auH9Iys2LSnekS75TUKOmVnLLjJC2WtCp9H52z7kZJqyW9LumSnPIzJC1P192azpU7YE0aPZSxI4a4B4+ZFZXuXOnfBcw/rOwGYElEzASWpJ+RNAtYCMxO9/l2x0TpwG3AIpLJ0mfm+c4BRVL6kJZ78JhZ8egy9CPiKWDbYcULgLvT5buBy3LKH4iIlohYSzIJ+lmSJgBVEfFMJI+53pOzz4A1d/Io1jTtZsdej7hpZsWhp236tRGxGSB9H5eW1wEbcrZrSMvq0uXDywe0uZOTVquXG9zEY2bFoa9v5OZrp48jlOf/EmmRpHpJ9U1NTX1WuaN16uRqJD+Za2bFo6ehvyVtsiF9b0zLG4DJOdtNAjal5ZPylOcVEbdHxLyImFdTU9PDKvZeVWU5M2pGuAePmRWNnob+Y8CV6fKVwKM55QslVUiaRnLD9rm0CWinpLPTXjtX5OwzoJ3uETfNrIh0p8vm/cAzwImSGiRdA/w1cLGkVcDF6Wci4lXgQWAF8EPg+ohoS7/qOuB7JDd31wBP9PG59Iu5U0axbfd+NmzbW+iqmJn1WllXG0TE5Z2suqiT7W8CbspTXg/MOaraDQAfnJLczP3F2neYMmZYgWtjZtY7fiK3CyeNH8nE6koWr9hS6KqYmfWaQ78LkvjorFp+tqqJvfvbut7BzGwAc+h3w8Wzatl3oJ2nV28tdFXMzHrFod8NH5o2hpEVZSxe8Xahq2Jm1isO/W4YUlbCBSeNY8nKRtra3XXTzAYvh343XTyrlnd27/cUimY2qDn0u+mCE2soL5V78ZjZoObQ76aqynLOnj7GoW9mg5pD/yhcPKuWN7fuZnXjrkJXxcysRxz6R+GjJ9cC+GrfzAYth/5RmDhqKHPqqtx108wGLYf+Ubr45PG8uGE7TTtbCl0VM7Oj5tA/ShfPqiUClqx0E4+ZDT4O/aN08oSR1I0a6nZ9MxuUHPpHSRIXz6rl6dVb2bO/tdDVMTM7Kg79HvjYrFpaWtt56g0PwGZmg4tDvwfOnHYcVZVlbuIxs0HHod8D5aUlXHjSOH702hZa29oLXR0zs27rcehLOlHSspxXs6SvSPqmpI055Zfm7HOjpNWSXpd0Sd+cQmFcPGs87+45wNK3PACbmQ0ePQ79iHg9IuZGxFzgDGAP8Ei6+paOdRHxOICkWcBCYDYwH/i2pNLeVb9wPnxiDUPKSnjspU2FroqZWbf1VfPORcCaiHjrCNssAB6IiJaIWAusBs7qo+MfcyMqylhw2kQefmEj2/fsL3R1zMy6pa9CfyFwf87n35L0sqQ7JY1Oy+qADTnbNKRl7yNpkaR6SfVNTU19VMW+d81509h7oI37nltf6KqYmXVLr0Nf0hDgk8C/pUW3ATOAucBm4OaOTfPsnncaqoi4PSLmRcS8mpqa3lax35w0vopzPjCGe37+Fgd8Q9fMBoG+uNL/OPBCRGwBiIgtEdEWEe3AdznUhNMATM7ZbxIw6BvErzl3Gm837+Px5ZsLXRUzsy71RehfTk7TjqQJOes+BbySLj8GLJRUIWkaMBN4rg+OX1AXnDCO6TXDufPptUR4/lwzG9h6FfqShgEXAw/nFP+NpOWSXgY+AvwuQES8CjwIrAB+CFwfEW29Of5AUFIirj5nGi817HD3TTMb8HoV+hGxJyLGRMSOnLLPR8QpEXFqRHwyIjbnrLspImZExIkR8URvjj2Q/NoH66geWs4dT68tdFXMzI7IT+T2gWFDyvj1D03h/7z6Nhu27Sl0dczMOuXQ7yNX/NLxlEjc9fN1ha6KmVmnHPp9ZEL1UC49ZQI/eH4DO/cdKHR1zMzycuj3oWvOncaullYerG8odFXMzPJy6Peh0yaPYt7xo7nr52tpa3f3TTMbeBz6feyac6exYdteFq94u9BVMTN7H4d+H/vY7PFMPm4oty5Z7bH2zWzAcej3sdISccP8k1mxudk9ecxswHHo94NLTxnPhSeN4+Yn36DhXffbN7OBw6HfDyTx5wtmA/Anj77qMXnMbMBw6PeTSaOH8bWPncCPXmvkf3sETjMbIBz6/eiq/zaVOXVV/Nl/rmDHXj+wZWaF59DvR2WlJfz1r57KO7ta+F8/fK3Q1TEzc+j3tzl11Vx9zjTue3Y9z6/bVujqmFnGOfSPga9efAJ1o4by9YeXs7/VfffNrHAc+sfA8Ioy/udls1nVuIt/+emaQlfHzDLMoX+MXHhSLb986gT+YckqnnqjqdDVMbOM6u10ievSqRGXSapPy46TtFjSqvR9dM72N0paLel1SZf0tvKDzV/96inMrB3Jtd9fyssN2wtdHTPLoL640v9IRMyNiHnp5xuAJRExE1iSfkbSLGAhMBuYD3xbUmkfHH/QqKos5+6rz2T0sCF84a7neeud3YWukpllTH807ywA7k6X7wYuyyl/ICJaImItsBo4qx+OP6CNq6rknmvOoq09uOLO59i6q6XQVTKzDOlt6AfwpKSlkhalZbUdk6Gn7+PS8jpgQ86+DWnZ+0haJKleUn1TU/G1f8+oGcEdV53JluZ9XP2vz7O7pbXQVTKzjOht6J8TER8EPg5cL+n8I2yrPGV5B6WJiNsjYl5EzKupqellFQemD04ZzT//+gdZsbmZa7+/1F05zeyY6FXoR8Sm9L0ReISkuWaLpAkA6XtjunkDMDln90nApt4cf7C76ORa/vJTc/jZqq384UMve7YtM+t3PQ59ScMljexYBj4GvAI8BlyZbnYl8Gi6/BiwUFKFpGnATOC5nh6/WHz2zCl87eITeOTFjVz1r8/x7u79ha6SmRWxsl7sWws8Iqnje+6LiB9Keh54UNI1wHrg0wAR8aqkB4EVQCtwfUS09ar2ReK3L5pJzcgK/uTRV/nEPz7Ndz53BqdMqi50tcysCGmgj/U+b968qK+vL3Q1jomXNmznu
u8vZevu/fzFgjl85szJXe9kZpaHpKU5XekP8hO5A8hpk0fxn799LmdOHc0fPPQyNz68nJZW/2PIzPqOQ3+AGTOignu+8CF+84IZ3P/cej7znWdY3bir0NUysyLh0B+ASkvEH8w/ie987gze3LqbS/7+Kb7xyHKadvpBLjPrnd7cyLV+Nn/OeM6cOppbl6zi3mfX8x8vbuTaD8/gi+dNZ+iQTI1gYWZ9xFf6A9yYERX82YI5PPm753PezBpuXvwGF3zrxzz4/Ab36zezo+beO4NM/bpt3PT4Sl5cv52J1ZV8et5kPj1vEpNGDyt01cxsAOms945DfxCKCJ5csYV7n13Pz1YlYxOd+4GxfPbMyVw8q5aKMjf9mGWdQ79INby7h3+rb+Df6jewacc+Rg8r59JTJvDhE2r4pRljGFlZXugqmlkBOPSLXFt78PTqrfzg+fX85PUm9uxvo6xEfHDKaM6bOZbzTqjhlLpqSkvyjXtnZsXGoZ8hLa1tvPDWdn62qomnVjXxysZmAEZWlDG7rorZE6uZU1fFnInVTK8Z4R8CsyLk0M+wd3a18PTqrTy7dhuvbmrmtc3NtKRDOQ8tL+XE8SOZNnY4x48ZxvFjhjHluGR5zPAhpGMrmdkg49C3g1rb2lnTtJtXNu7g1U3NrNzczFvv7GZz8z5y/3MYPqSU8dWVjBtZSc3ICsaNrGBcVQXjRlZy3PAhjBpWTvXQckYNHcLIyjJK/C8GswGjs9D3w1kZVFZawonjR3Li+JH82hmHyvcdaKPh3b289c5u3npnD+u37WFL8z4ad7bw4oZ3aWxuOfgvhMNJyRzAVUPLGD6kjOEV6WtIKcMryhg2pJTK8lIqy0qoKE+Xy0uoLCulvKyEIaUlDCkT5aXJcnlZCeUlJZSWiPJSpe/J57ISUVIiSiVKS9P3ElEiUaLkiWb/C8UsP4e+HVRZXsoHxo3gA+NG5F0fEexsaaWxuYV39+xnx54DbN97gO179tO8N1lu3nuA3fvb2N3Syo69B9i0fS97WlrZvb+NfQfaOv3R6A8lIvkhKBEiWZYOvQvQYcvJP1aSsmSJdP2h7Ui35eDye9/Tb3jf+o7vO1Se/4fpiD9Xnaw82p+4wfajOLhq23f+63fO7fMu2A596zZJydV8L7qBRgQtre20HGhnX2vyQ3CgrZ39rZG8t7VzoLWdlrZ22tqC1vZ2WtuD1rZI35PP7RG0tee8ImhvD9oD2iN5jzi0juR/tLdH8h5BpNsEJMskZYcedO7Y5tC6pLTjXJLy9xS+d5Hc5tP3lnfy59PFn93R7tM3OxRWDLYK9yH1w8+dQ9+OKUlp004p1fgZArNjzWPvmJllSG/myJ0s6ceSVkp6VdKX0/JvStooaVn6ujRnnxslrZb0uqRL+uIEzMys+3rTvNMKfC0iXkgnSF8qaXG67paI+FbuxpJmAQuB2cBE4P9KOsHz5JqZHTs9vtKPiM0R8UK6vBNYCdQdYZcFwAMR0RIRa4HVwFk9Pb6ZmR29PmnTlzQVOB14Ni36LUkvS7pT0ui0rA7YkLNbA538SEhaJKleUn1TU1NfVNHMzOiD0Jc0AngI+EpENAO3ATOAucBm4OaOTfPsnrcvVkTcHhHzImJeTU1Nb6toZmapXoW+pHKSwL83Ih4GiIgtEeNnfSgAAAN0SURBVNEWEe3AdznUhNMATM7ZfRKwqTfHNzOzo9Ob3jsC7gBWRsTf5ZRPyNnsU8Ar6fJjwEJJFZKmATOB53p6fDMzO3o9HnBN0rnAz4DlQMez9V8HLidp2glgHfCliNic7vMN4AskPX++EhFPdOM4TcBbPaokjAW29nDfwcznnS0+72zp7nkfHxHvax8f8KNs9oak+nyjzBU7n3e2+Lyzpbfn7SdyzcwyxKFvZpYhxR76txe6AgXi884Wn3e29Oq8i7pN38zM3qvYr/TNzCyHQ9/MLEOKMvQlzU+Hb14t6YZC16c/peMbNUp6JafsOEmLJa1K30cf6TsGoyMM7V3U5y6pUtJzkl5Kz/vP0vKiPm8ASaWSXpT0X+nnoj9nAEnrJC1Ph6qvT8t6fO5FF/qSSoF/Bj4OzAIuT4d1LlZ3AfMPK7sBWBIRM4El6edi0zG098nA2cD16d9zsZ97C3BhRJxG8hDkfElnU/znDfBlktF8O2ThnDt8JCLm5vTP7/G5F13ok4z1szoi3oyI/cADJMM6F6WIeArYdljxAuDudPlu4LJjWqlj4AhDexf1uUdiV/qxPH0FRX7ekiYBvwx8L6e4qM+5Cz0+92IM/W4P4VzEajuGvkjfxxW4Pv3qsKG9i/7c02aOZUAjsDgisnDefw/8AYeGfIHiP+cOATwpaamkRWlZj8+9GCdG7/YQzjb4HT60dzIOYHFLZ5ubK2kU8IikOYWuU3+S9AmgMSKWSrqg0PUpgHMiYpOkccBiSa/15suK8UrfQzjDlo7RTtP3xgLXp1/kG9qbjJw7QERsB35Cck+nmM/7HOCTktaRNNdeKOn7FPc5HxQRm9L3RuARkibsHp97MYb+88BMSdMkDSGZl/exAtfpWHsMuDJdvhJ4tIB16RedDe1NkZ+7pJr0Ch9JQ4GPAq9RxOcdETdGxKSImEry/+cfRcTnKOJz7iBpeDoHOZKGAx8jGa6+x+delE/kSrqUpA2wFLgzIm4qcJX6jaT7gQtIhlvdAvwp8B/Ag8AUYD3w6Yg4/GbvoHaEob2fpYjPXdKpJDfuSkku2h6MiD+XNIYiPu8OafPO70XEJ7JwzpKmk1zdQ9Icf19E3NSbcy/K0Dczs/yKsXnHzMw64dA3M8sQh76ZWYY49M3MMsShb2aWIQ59M7MMceibmWXI/wfjwkNpQlIdhQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "plt.plot(loss_history)\n", "plt.title('Loss per epoch');" @@ -494,22 +398,9 @@ }, { "cell_type": "code", - "execution_count": 289, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3df3hcZZ338fd3Jg0FKVIKCktKU6TQUqDQpKVY6iW/tApP+SULFKQsKi6y1+LKw/PgosLqshe7wrq61uVB6hbXFhZcoBUUUHC1AiFNKBVKWyklNRHUEkptt9AkM9/njzOTTqYzyUzmx5kfn9d19UrnzJmZu1E+557vfZ/7NndHRETqSyTsBoiISPkp/EVE6pDCX0SkDin8RUTqkMJfRKQOKfxFROqQwl9EpA4p/KWqmVmXmfWZ2cFpx18wMzez5sTjJjP7LzN708y2m9mLZnZl4rnmxLk70/5cnGMb/snMus3sT2a2xcxuSnv+LjPbaGbx5GemPLfIzDoTr+1JvFdDls852sxWmNlWM3vLzB43s2Ny/V2JpFL4Sy14Dbg0+cDMjgf2TTvnP4BuYBIwAbgC+EPaOQe6+/4pf/4zx89fAkx19wOADwILzeyClOfXAp8Dns/w2v2AzwMHAycDZwD/O8vnHAisBI4B3g+0AytybKPIEAp/qQX/QRDmSYuA76edMwtY6u7/4+4D7r7G3X9SjA93943u/j8ph+LAUSnPL3b3J4F3M7z239x9lbv3ufvvgGXA3Cyf0+7uS9z9LXfvB74BHGNmE4rx75D6ovCXWtAGHGBm08wsClwM/CDDOYvN7BIzOyKfNzezhWb26xHOudHMdgI9wHuA5fl8RooPAevyOPf37t47ys+SOqbwl1qR7P2fBWwAfpf2/EXAKuDLwGuJMYFZaee8aWZvp/yZBuDuy939hOE+3N1vA8YBMxNt2Z7vP8DM/gJoBW7P4dwmYDHwhXw/RwQU/lI7/gNYCFzJ3iUf3H2bu9/o7tMJ6uUvAA+bmaWcdrC7H5jyZ30+DfDAGuAd4O/yea2ZnQfcBnzM3d8c4dxDgCeA77j7vfl8jkiSwl9qgrtvIRj4/Tjw4AjnvknQu/4z4KASNKcB+ECuJ5vZfOC7wP9y9xdHOHc8QfCvdPdbC2ql1DWFv9SSTwGnpw2+AmBm/2hmx5lZg5mNA64BNhVaLzeziJl91szGW2A2cC3wZMo5jWY2FjBgjJmNNbNI4rnTCQZ5L3T39hE+6wDgceBpd7+xkHaLKPylZrj7q+7ekeXp/YCHgLeBzQRTPheknfN22jz/LwCY2WVmNtwg7PnAq8AOgoHmf038SXqCoBT0QeCuxN8/lHjuy8B7gR+nfO7gLCQz+4mZ/W3K58wC/iKtnXkNYIsAmDZzERGpP+r5i4jUIYW/iEgdUviLiNQhhb+ISB3KuHpgJTr44IO9ubk57GaIiFSVzs7ON939kPTjVRP+zc3NdHRkm8UnIiKZmNmWTMdV9hERqUMKfxGROqTwFxGpQ1VT88+kv7+fnp4e3n13rz0yasrYsWNpampizJgxYTdFRGpEVYd/T08P48aNo7m5maEr89YOd6e3t5eenh4mT54cdnNEpEZUddnn3XffZcKECTUb/ABmxoQJE2r+242IlFdVhz9Q08GfVA//RpFq07llG4t/vonOLdvCbsqoVHXZR0QkDJ1btnHZ3W30DcRpbIiw7NNzaJk0Puxm5aXqe/6V5JZbbuH227Nvv/rwww/z8ssvl7FFIlIKbZt76RuIE3foH4jTtrmgPYFCofAvI4W/SG2Yc+QEGhsiRA3GNESYc+SEsJuUt7oL/2LX6W699VaOOeYYzjzzTDZu3AjAd7/7XWbNmsWMGTO48MIL2bVrF8888wwrV67khhtu4MQTT+TVV1/NeJ6IVL6WSeNZ9uk5fOEjx1RlyQfqLPyTdbo7ntjIZXe3FXwB6Ozs5L777mPNmjU8+OCDrF69GoALLriA1atXs3btWqZNm8aSJUv44Ac/yIIFC/j617/OCy+8wAc+8IGM54lIdWiZNJ5rTzuqKoMf6mzAN1OdrpD/4VatWsX555/PfvvtB8CCBcGWsC+99BJf+tKXePvtt9m5cycf/ehHM74+1/NERIqtrnr+pajTZZqGeeWVV/Ltb3+bF198kZtvvjnrHP1czxMRKba6Cv9i1+k+9KEP8dBDD/HOO++wY8cOfvSjHwGwY8cODjvsMPr7+1m2bNng+ePGjWPHjh2Dj7OdJyJSanVV9oHgAlCsGt3MmTO5+OKLOfHEE5k0aRLz5s0D4Gtf+xonn3wykyZN4vjjjx8M/EsuuYTPfOYzfOtb3+KHP/xh1vNERErN3D3sNuSktbXV0zdzWb9+PdOmTQupReVVT/9WESkeM+t099b043VV9hERkUBRwt/MvmdmfzSzl1KOHWRmPzWzVxI/x6c890Uz22RmG81MU1xERMqsWD3/pcD8tGM3Ak+6+xTgycRjzOxY4BJgeuI13zGzaJHaISIiOShK+Lv7L4G30g6fC9yT+Ps9wHkpx+9z993u/hqwCZhdjHaIiEhuSlnzf7+7vwGQ+Pm+xPHDge6U83oSx0REpEzCGPDNtDh9xilHZna1mXWYWcfWrVtL3CwRkfpRyvD/g5kdBpD4+cfE8R5gYsp5TcDrmd7A3e9y91Z3bz3kkENK2NTKsf/++4fdBBGpA6UM/5XAosTfFwErUo5fYmb7mNlkYArQXsJ2hC4Wi4XdBBGRIYo11fNe4FngGDPrMbNPAbcBZ5nZK8BZice4+zrgfuBl4DHgWncvXzp2t8OqO4KfRdDV1cXUqVNZtGgRJ5xwAp/4xCfYtWsXzc3NfPWrX+XUU0/lgQce4NVXX2X+/Pm0tLQwb948NmzYAMBrr73GKaecwqxZs/jyl79clDaJiIykKMs7uPulWZ46I8v5twK3FuOz89LdDvcsgFgfRBth0UqYWPhEo40bN7JkyRLmzp3LVVddxXe+8x0Axo4dy69+9SsAzjjjDO68806mTJnCc889x+c+9zmeeuoprrvuOq655hquuOIKFi9eXHBbRERyUV93+HatCoLfY8HPrlVFeduJEycyd+5cAC6//PLBwL/44osB2LlzJ8888wwXXXQRJ554Ip/97Gd54403AHj66ae59NLg2
vnJT36yKO0RERlJfS3s1jwv6PEne/7N84rytunLOicfv+c97wEgHo9z4IEH8sILL+T0ehGpPp1bttG2uZc5R04o3gYv3e1BJ7V5XlGqFKnqq+c/cXZQ6jn9pqKVfAB++9vf8uyzzwJw7733cuqppw55/oADDmDy5Mk88MADALg7a9euBWDu3Lncd999AFrWWaRKFXuXQGBPmfqpW4OfRRqnTKqv8Icg8OddX9Sr6LRp07jnnns44YQTeOutt7jmmmv2OmfZsmUsWbKEGTNmMH36dFasCCY/ffOb32Tx4sXMmjWL7du3F61NIlI+mXYJLFiJytRJ9VX2KZFIJMKdd9455FhXV9eQx5MnT+axxx7b67WTJ08e/NYAcOONN5akjSJSOsldAvsH4kXbJbBUZeokhb+ISIGSuwS2be5l/H6Ngz3/gmr/yTJ1iWr+Cv8CNTc389JLL418oohUlXwHcJPnXHZ3G30DcRobIoVvFztxdtFDP6nqw9/da362TLXstiZSK5IDuPmGeKbaf9Fm/hRZVQ/4jh07lt7e3poOR3ent7eXsWPHht0Ukbox2gHcZO0/ahSv9l8iVd3zb2pqoqenh1pf8XPs2LE0NTWF3QyRujHaAdzU2n9R5/uXQFVv4C4iUioluWkrhDZk28C9qnv+IiKl0jJp/OhDvwh35o523CFXCn8RkWLqWAo/vh48DtF9Rr2aQKkHjxX+IiLF0N0Oa5dD5/eDu3IBYruDbwCjCP+S3DiWQuEvIlKoZG8/PjD0uEVGfWduqQePFf4iIqORrOvvOyFz8Eca4ON3FHSTVkHjDiNQ+IuI5Cu1rm8RiKdsRmhRaFkEMy4t2d25xaDwFxHJR3f70J6+A5HongvBx++A1ivDbGFOFP4iErpKmFOfs65VEI/veRxJBP47vSVZgK1UFP4iEqpSz2cvuuZ5xJNLLVuESJX09NNV9do+IlL9SrIRSjF0t8OqO/baQaszPoWFfX/LHQMXsXDgK3Qecm5IDSyMev4iEqp857OXpUSU3EIxuZFKyo1abZt7aR84ijY/iqhR0St3DkfhLyKhymc+e9lKRJm2UEyEf6lvvioXhb+IhG6k+ezJ3v7rb79TnvXyh9lCsZpW7hyOwl9EKlpqb78hYjREI8RiJe51j7CFYilvvioXhb+IlF0+dfvUAeFY3Ll49kQOP3Df0ve689hCsaqmqiYo/EWkrPKt26fX2C+c2VRRAVt1U1UTFP4iUlb5LlVc6TX2atq3N5XCX0TKajSzZSq5xl6ts3+0jaOIlF16jTzXmnm288KuuYf9+cPRNo4iUjFSe/K51syznVcJNfdK/maSjZZ3EJFQ5bq8Q7bz2jb3Mj22gb+MrOC42IbKWR6iwpW8529mXcAOIAYMuHurmR0E/CfQDHQBf+7u20rdFhGpPLnWzDOe193On//+3/n0mPuJEqefBrbsfzxwVHn/EVWo5DX/RPi3uvubKcf+CXjL3W8zsxuB8e7+f4d7H9X8RWrXqGr+kVeC9XcG3sVxDHCLYqffBPOuL1/jK1yl1fzPBT6c+Ps9wH8Dw4a/iNSuXGvmQ8575F4YeBcSwQ+GpS3FINmVI/wdeMLMHPh/7n4X8H53fwPA3d8ws/dleqGZXQ1cDXDEEUeUoakiUtG622Htcti5FX7zGEG8AJExMPOTFb91YiUpR/jPdffXEwH/UzPbkOsLExeKuyAo+5SqgSJS4ZKh//wPIN6f9qTBzMvhnG+E0rRMKnnqZ1LJw9/dX0/8/KOZPQTMBv5gZoclev2HAX8sdTtEpEolN0uPxxjs6Q8yaBgLMxaG0LDMKmHqaS5KOtXTzN5jZuOSfwc+ArwErAQWJU5bBKwoZTtEpEoN2Sw9LfgjY4LtE1M2WqkEFbszWZpS9/zfDzxkZsnPWu7uj5nZauB+M/sU8FvgohK3Q0RyUHHliq5V4CmbpVsUjvkY7P++iq3vV8tyDyUNf3ffDMzIcLwXOKOUny0i+SlHuSLrxaVjKaxfAdPOHboZevM8iO4Dsd1gEaiCzdIrfSG6JC3vIFLHUsO41KtTZr24dCyFR64LTnr1qeBnMuBH2FSlUlXDcg8Kf5E6lR7GXzlneknLFVkvLmu+P/TENd8f2rvPY1MVyZ3CX6ROpYfxtl19JS1XpNbCZzVs4ryda6D7IzDusKEnpj+WklD4i9SpTAOTpSxXtEwaz8MLxtD//HKm/+FHRJ6Pwdpvw/zbiP/mcSzej0fGEJl7XUk+X4ZS+IvUqdSByfH7NQ5OSSxG+Gcc2O1uZ+rjlw8uyQBArI/fvdHD9f1fpsXX0WnTuSE+hZaCWyAjUfiLFKDipkbmKdnmYs7yyTqw27UKYn3sma9vEG3k2dixtA/sQ5sfRdSomm0Qq53CX2SUquVOzpE8+HwPu/vjOMWZ5ZNcX//kyHraY9No2zwleL/meRBtDC4AkSicdDnMuJTJ8Sk0drZV/Lz4WqPwFxmlat24O1Xnlm080NE92BePRgsP3zP27+KqMf/AGAaGrq+fZdpmC1TFvPhao/AXGaVquZNzOG2bexmIB9FvwLRDxxX2ht3tTN2wGLd+DCdqMaa+uxY4M3g+y7TNapgXX2sU/iKjVC13cg4neQFLfoP5dc92Lru7bXQlrO72YHOV2G4MByJaX7+CaQ9fkQK0TBrPtacdVZXBD3suYHOPOjjYCYs8FiPrbodVdwQ/Yc+ArseDpRg+8OGKW3RN9lDPX6TOtUwaz+fPPJrVXW/lVsLqboe198KaHwTLLEcbg5BPHdCNNsKHv6jgr2AKf5E6l5yueuUpzax740987LjDsn+TGVxbf2DPsVhf0Oufd31VrsNTrxT+InUsdbpq3INB39Vdb3HMoeP2vgAMWVs/KZirP1jX1zo8VUM1f5E6ljpdFUao+WdaWz/EzVQ6t2xj8c830bllW9k/uxao5y9SxQq9wzh9tk8Estf8c1hbv1x3PNfKDXZhUviLVKliBGD6+j7bdvVlD+4R1tYvZyDXwg12YVP4i1Sp1ADs64/zLz/7DZ8/8+hRXQBaJo2Hn94ML94PXZPhzFsyl3KGqemXKpAzfZuohRvswqbwF6lS4/drJGKGuxMHnt70Jqu73sq/x92xFP77H2DnH4LHf3odvjcfrnosY9BnK+2UIpCzfZuohRvswqbwF6lCnVu28dVH1hF3xxJ3Z42qx/1fnwl6++k8FpR38ijtlCKQh/s2oSUhCqPwFymBUg98poZixCAaCb4B5Nzj7m6Hp78JGx7J/LxFMy7LMFJpp9iBrPJO6Sj8RYosW++40AtC6uvTQ/Er50wffrA2qbsd1i6HNcuDWTuZHHo8nP3PGUs+5Q7jkb5NVPt+CmFS+IsUWabeMRS2YUqmC0reJZbBu3Nj7NlQJcUwoZ/qwplNeOJnOQI327cJTfcsjMJfpMgy9Y7zmQmTqTeb6fU5LSjXsRTWr4BDTyD+zLcxH8AGnzSINMBhJ8BJV+w1Zz9Tu1LD9sKZTfn8WopO0z0Lo/AXKbJspYpcyiXZerN5l1vSavr+6lPgYAbu4BYl0roIZlya8925
lRa2Gg8ojMJfpATSSxXpFwSAxT/ftFfJJlvA5jWTJrmu/sA7Qw47xoCDE2HVlBs5/Zz/k9e/qdLCVtM9C6PwFymTZIgPV6seLmBznkkzuFH60Mr+kvg5bPf96LTp3HDqFUB+A6aVGLaa7jl6Cn+RMss2IJwM1Vy+IQxKzt7ZuRX2f19Qxkmsq++xPvrjsC4+iQc5nWnnXEf/rj5uSLzXaAZMFba1Q+EvUmbpvfvx+zXuFcLXnnbUyOHcsRQe/ZuhK22uWQZXPgKLVtL21MPcvvEQOuNHEzX4wq4+rj3tqMFTK62GL+WlJZ1FCjCaZYWT5ZMvfOQYln16Dtt29WX9JpDpOBD0+B/9wtDghz0bq0ycTeOHb2BddCpRy7xSZ/IilO15qW3q+UvdG+2NQoXMM08vn2Sq8w87wJq+tn5SysYqI9XoK7GGL+Wj8Je6VkiAj1S7z+dCkCmE9zoeeQVWJZZTHtwvN3mXrsHUs2HudUOmbo5Uo1cNv36FFv5mNh/4JhAF7nb328Jqi9SvQureudTuR/tNYPB45BVa/mc5PLsVXnli6IbpVz4Ca+9l6453+cW+ZzH5pNNomTj852k5BEkKJfzNLAosBs4CeoDVZrbS3V8Ooz1SvwqZu57eMy/6AGp3Oyw9e3Da5qCUDdM741P2XHA624a94Iz0LUcXhvoSVs9/NrDJ3TcDmNl9wLmAwl/KqtC6dy61+7x1twfhvr0bYv1pTw7dMD2fC85w52qdnPoTVvgfDnSnPO4BTk4/ycyuBq4GOOKII8rTMqk7xap7F3QhSQb+vhPgsRuD3n2kASJRiA8E50TGwMzLYcbCwbp+Pt9chjtX0z7rT1jhbxmO7bXMoLvfBdwF0NrammEZQpHKMqoLyeBqm3GIRIJZPB6HONCyiOA/Dcu4Dk8+F5zhzq20pRuk9MIK/x5gYsrjJuD1kNoiUnJZ6+nd7YngT/Tu4x709pPlnRwWXsvngpN1YFnTPutOWOG/GphiZpOB3wGXAAtDaotISWWsp0deSdT1e4bO149E4eN3wDu9QV0/xxU3R9OmTFNLFfr1I5Twd/cBM/sr4HGCqZ7fc/d1YbRFpNTaNveyuz/OSfYbTomvZ/uvXoTX/jlR148Gtfx4P1gkCP4R1tUvlAZ3BUKc5+/uPwZ+HNbnS+2o9CmK4/dr5PaGxZwffToY2HolCuYpdf0r4L0TS9rTT6XBXQHd4StVrhp6sUe/eDst0aeBYLN1JwZEg03So41DZu9kUuyLmwZ3BRT+UuUqtheb3D5x2rkct/0XQLCLFgAO2444g/FT5kDzPDrjU2jLsmzzaC9uw10wijW4W+nfuGR4Cn+pahXZi/3pzfD0vwR/f/Upxh55Or6jC09MVo4R4cmDLuUT8y4YMdxHc3HL5YJR6OBuNXzjkuFpSWepaunLI4ceQB1L9wR/0u7t/P74v+QNP4jn4lO5PH4Lk086DRhh2WZGt+zySO9ZDOX4DCkt9fyl6pVjimJOJY7knP104w7jsAv/kc7WG+nc3Du4kxaM/M1lNCWacnwbqshvXJIXc6+OG2dbW1u9o6Mj7GZIHRq2xJFS2+edXnjy74H44O3qblEiVz1W1gHd1Pccv18j23b1laQur5p/dTCzTndvTT+unr/ICLLW3TuWwiPXBSe9+hTM/Tw07IMP7Cbm8GRsJv9uC7ghPoWWYd6/FN9cku9Xyrq8bgqrbgp/kRFkLXGsXzH0xN//OuPeuWHNQKrYmVBSETTgK5JFcn9egHtOeJln9/sb1oy9hpbfJAZ0p5079AXTzs1p79xy0R69Mhz1/EUyWP7cb/nKipc4wTdyVMMjfCSyZ7zJn/6XYFnas/4uOJCs+SeWZaiURdIqpR1SmTTgK5Kmc8s2brvrHj5lP+LM6PNECRZeS96k5YAddCT89ZrwGimSIw34iuSgc8s2Xnj4G9zb8K9DQt+dwZu0MGDagtDamEozbmS0FP4iCZ1btvHw3X/PLZElRPA9PX2HOMbOhvHs2wCNrVfsKfmESHfZSiEU/lIRQu3BJubq++6JfCXyg8HgT3b04xal+4Nfo/eYhXvaWN4WZqTZPFIIhb+ELtQebMpc/RaCFZaTZR7MsKlnE517Hb3xKRXXy9ZdtlIIhb+ELpQebHLD9A2PDh4yIGKRoNIfiRA5e8/GKm0/35RXG8vxTUazeaQQCn8JXVl7sN3tsHY5rFke7JtrQ291sbl/jY09YK+NVfJpY3KaaCzu7DOmtN8SdJetjJbCX0JXlh5sdzusvRfW/ABi/eyp6ANTz4H+XUPm6o+2jZ1btvGVFS8xEA/ev69ftXipTAp/qQipPdiil0w6lgarbcYH0p6wYCetudfltH1iLr3sts29xFPunYlETLV4qUgKfymZ0YR40Qd/k8ssDwl+g+gYOOlymHFpUffNTZaH+gbiRMz46rnHqdcvFUnhLyUx2hAv+uBv16pgo/SEuEVZd+h5jJm5kKmzzhz9+2ahQVipFgp/KYnRhnhBg7/JGTypg7XN8yC6D8R2EyfCzQNXsqzrdBp7+ln2vm0lCWcNwko1UPhLSYw2xEfVc04dzI3Hgjr+opXBBWDi7ODvXat4sHcyy57bRzdFiaDwlxIppPyRV8+5ux3uWQAD7zI4gyfWF3wDSPb+J86mMz6FNW/20BDpJhb3YS9IWi9H6oHCX0qmLOWPrlVB2JOy6lq0MSj3JKSOPzREI1w8eyIXzmzK2Lbhxip0UZBaovCX6pJe12+eF4R9rA8iDXDSQpixcMgMntTxh1gszuEH7ps1vLONVXRu2cal320bLGPd+5nwl3cQKYTCXwpS1t5wssQT6xta10/U9NPvyk3KZ/wh27kPPt9D30Awa6hvIM6Dz/co/KWqKfxl1Aqdk5/3hSNZ4vHY0Lp+8k8WI40/pLcj07npWx5VxxZIItkp/GXUCpmTP6oLR2qJJ62uP5Js4w/Z2pF+7oUzm/hhRzf9MWdM1LhwZlPOny1SiRT+MmqFzMkf9sKRab4+5FTiyVeuF7CWSeO59+pTNOArNUPhL6NWyHTOrBeObHX9pBFKPPnK5wKmm7eklpQs/M3sFuAzwNbEob919x8nnvsi8CkgBvy1uz9eqnZIaY02EPe6cERegVWrYHtP5rp+AYYbW9ByDFKvSt3z/4a73556wMyOBS4BpgN/BvzMzI5291iJ2yIVpiXyCi0Nq2DrBHjsxsR0zWgwZTNOxrp+voPE2Wr66e+j0Jd6E0bZ51zgPnffDbxmZpuA2cCzIbRFwpJa3jGDeByIB6HfcgW8d+Jedf3RDBJnqukDZd+SUTeISaUpdfj/lZldAXQA17v7NuBwoC3lnJ7EMakHycHc7d17yjtEIBIBT9ydm3aTVtJoZhdlqumXe9vIUPcoFsmioPA3s58Bh2Z46ibg34CvEUyJ/hpwB3AVwVap6TJOmzazq4GrAY444ohCmiqVILW3H2kISjzJ8s782+Cd3mFn8YxmdlG2mn45Nz4PZY9ikREUFP7untOC6Gb2XeCRxMMeYGLK003A61ne/y7gLoDW1lbdV1OtBnv7KYO
5caBlEby3Kedpm6MdnE2v6Zd7kLesexSL5MjcS5OpZnaYu7+R+PvfACe7+yVmNh1YTlDn/zPgSWDKSAO+ra2t3tHRUZK2SomkL7UciQIW7KqVaRpnDVPNX8JiZp3u3pp+vJQ1/38ysxMJSjpdwGcB3H2dmd0PvAwMANdqpk8NyrTU8jCDubVOM4qk0pQs/N39k8M8dytwa6k+WypAtqWWswzmikh56Q5fKY0clloWkfAo/KUwZVyHR0SKR+Evo1fmdXhGokFVkdwp/GtEKMGXbX39EOhGKpH8KPxrQFmCL1N5ZxTr65fqIqUbqUTyo/CvASUPvo6l8OPrg/V3GvbZU97Js65fyouUbqQSyY/CvwaULPi622Htcuj8fmINHmBg99DyTh51/VJepLQ0s0h+FP41oCTB17GU+KPXgw9gBAsyOeAWIZLH9ompSt07141UIrlT+NeIogZfdzvxR6/H4gOYgXtwc26MKF+NXcl58Sm0jLKN6p2LVAaFv+zldy88waHxGJFE8MeIcF/sNB6MzWMtR3NoAeUa9c5FKoPCX/bybOxYzmYMY7wfJ8LNA1fyQzuLGBpMFakVCn/Zy+STTuMvOr9Ei6+j3Y/l/AUXcOGh41SuEakhJVvSudi0pHN5FXM+vu68FQlPGEs6SxUrVm1ed96KVKZI2A2Q2pZtA3URCZfCX0oqObc/amiwWKSCqOxTi7ItsxwCze0XqUwK/1oz0jLLIdDcfpHKo/CvFcne/vbuillmWUQql8K/FqT29iMNEIkG6zHkuMzySDRVU6T2KPxrQeqmKnGgZRG8t6koNX9N1RSpTZrtUxKlioUAAAZFSURBVAuSm6pYFKKNbHj/2SweOJfO+JSC31pTNUVqk3r+tSBlU5UNY2dw3sp++gY2FqWnrk1SRGqTwr+CZKyt5zptM7GpypM/30TfwMaibZiiqZoitUnhXyFSa+sN0QifaGniiqY/cPRjlw1O24xc+aMRa/il6KlrqqZI7VH4V4jU2vpxsQ0c2PEQzz//JkdF+miwOAMDfbzxwhMcPkL4q6cuIrlQ+FeIOUdOYHbDJs7xX3BR9BdEiRMjQowIOPTTwLOxY/lEDu+lnrqIjEThXyFaIq+wvPEfILYbc8cSm+beHz+d3/kEOm06N5x0WtjNFJEaofCvFF2riMT7AccN4hiRhkZmzr+GbTubuUElHBEpIoV/pUjO1Y/1YZEG7KSFMGMhUyfOZmrYbRORmqPwrxQpc/UrYTVOEaltCv9KkpirLyJSagUt72BmF5nZOjOLm1lr2nNfNLNNZrbRzD6acrzFzF5MPPctM7NC2iAiIvkrdG2fl4ALgF+mHjSzY4FLgOnAfOA7ZhZNPP1vwNXAlMSf+QW2QURE8lRQ+Lv7enffmOGpc4H73H23u78GbAJmm9lhwAHu/qy7O/B94LxC2iAiIvkr1aqehwPdKY97EscOT/w9/XhGZna1mXWYWcfWrVtL0lARkXo04oCvmf0MODTDUze5+4psL8twzIc5npG73wXcBdDa2pr1PBERyc+I4e/uZ47ifXuAiSmPm4DXE8ebMhwXEZEyKlXZZyVwiZntY2aTCQZ22939DWCHmc1JzPK5Asj27UFEREqk0Kme55tZD3AK8KiZPQ7g7uuA+4GXgceAa909lnjZNcDdBIPArwI/KaQNIiKSPwsm3VS+1tZW7+joCLsZIiJVxcw63b01/bj28BURqUMKfxGROqTwFxGpQwp/EZE6pPAXEalDCn8RkTqk8BcRqUMKfxGROqTwFxGpQ7Uf/t3tsOqO4KeIiAC1vodvdzvcswBifRBtDDZI1x65IiI13vPvWhUEv8eCn12rwm6RiEhFqO3wb54X9PgtGvxsnhd2i0REKkJtl30mzg5KPV2rguBXyUdEBKj18Ac641NoGziIOfEJtITdGBGRClHT4d+5ZRuX3d1G30CcxoYIyz49h5ZJ48NulohI6Gq65t+2uZe+gThxh/6BOG2be8NukohIRajp8J9z5AQaGyJEDcY0RJhz5ISwmyQiUhFquuzTMmk8yz49h7bNvcw5coJKPiIiCTUd/hBcABT6IiJD1XTZR0REMlP4i4jUIYW/iEgdUviLiNQhhb+ISB1S+IuI1CFz97DbkBMz2wpsCbsdJXYw8GbYjagg+n0Mpd/HUPp9DJXt9zHJ3Q9JP1g14V8PzKzD3VvDbkel0O9jKP0+htLvY6h8fx8q+4iI1CGFv4hIHVL4V5a7wm5AhdHvYyj9PobS72OovH4fqvmLiNQh9fxFROqQwl9EpA4p/CuMmX3dzDaY2a/N7CEzOzDsNoXJzC4ys3VmFjezup3WZ2bzzWyjmW0ysxvDbk+YzOx7ZvZHM3sp7LZUAjObaGY/N7P1if9WrsvldQr/yvNT4Dh3PwH4DfDFkNsTtpeAC4Bfht2QsJhZFFgMfAw4FrjUzI4Nt1WhWgrMD7sRFWQAuN7dpwFzgGtz+f+Hwr/CuPsT7j6QeNgGNIXZnrC5+3p33xh2O0I2G9jk7pvdvQ+4Dzg35DaFxt1/CbwVdjsqhbu/4e7PJ/6+A1gPHD7S6xT+le0q4CdhN0JCdzjQnfK4hxz+45b6Y2bNwEnAcyOdW/PbOFYiM/sZcGiGp25y9xWJc24i+Dq3rJxtC0Muv486ZxmOaY62DGFm+wP/BXze3f800vkK/xC4+5nDPW9mi4BzgDO8Dm7EGOn3IfQAE1MeNwGvh9QWqUBmNoYg+Je5+4O5vEZlnwpjZvOB/wsscPddYbdHKsJqYIqZTTazRuASYGXIbZIKYWYGLAHWu/s/5/o6hX/l+TYwDvipmb1gZneG3aAwmdn5ZtYDnAI8amaPh92mcktMAPgr4HGCwbz73X1duK0Kj5ndCzwLHGNmPWb2qbDbFLK5wCeB0xOZ8YKZfXykF2l5BxGROqSev4hIHVL4i4jUIYW/iEgdUviLiNQhhb+ISB1S+IuI1CGFv4hIHfr/5P5imyOW8LcAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "# Vis\n", "fig, ax = plt.subplots()\n", From 5ade64432b60b9aec20cacd520557a9199c40e2f Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:07:35 -0300 Subject: [PATCH 05/18] Update lecture 03 --- 03-modules-and-mlps.ipynb | 232 ++++++++++++++++++++++++++++++-------- 1 file changed, 184 insertions(+), 48 deletions(-) diff --git a/03-modules-and-mlps.ipynb b/03-modules-and-mlps.ipynb index ae91458..150bb6f 100644 --- a/03-modules-and-mlps.ipynb +++ b/03-modules-and-mlps.ipynb @@ -1,5 +1,24 @@ { "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Why Modules\n", + "\n", + "A typical training procedure for a neural net:\n", + "\n", + "0. Define a dataset (what is X and Y)\n", + "1. Define the neural network with some learnable weights\n", + "2. Iterate over a dataset of inputs\n", + "3. Pass inputs to the network (forward)\n", + "4. Compute the loss\n", + "5. Compute gradients w.r.t. network's weights\n", + "6. Update weights (e.g. weight = weight - lr * gradient)\n", + "\n", + "PyTorch handles 1-6 for you via encapsulation, so you still have the flexibility to change something in between if you want! " + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -47,6 +66,19 @@ "from IPython.core.debugger import set_trace" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import torch\n", + "\n", + "np.random.seed(0)\n", + "torch.manual_seed(0);" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -77,19 +109,10 @@ "- VOC\n", "- Cityscapes\n", "\n", - "`Dataset` gives you information about the number of samples (implement `__len__`) and gives you the sample at a given index (implement `__getitem__`.\n", + "`Dataset` gives you information about the number of samples (implement `__len__`) and gives you the sample at a given index (implement `__getitem__`).\n", "It's a nice and simple abstraction to work with data." ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from torch.utils.data import Dataset" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -115,6 +138,17 @@ "metadata": {}, "outputs": [], "source": [ + "from torch.utils.data import Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# download MNIST and store it in \"../data\"\n", + "# PyTorch.datasets also handles caching for you so you don't have to download the dataset twice\n", "train_data = datasets.MNIST('../data', train=True, download=True)\n", "test_data = datasets.MNIST('../data', train=False)\n", "\n", @@ -179,7 +213,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Each sample is a 28x28 matrix. But we want to represent them as vectors, since our model doesn't take any advantage of the 2-d nature of the data.\n", + "Each sample is a 28x28 matrix. 
But we want to represent them as vectors, since our model (a simple MLP) doesn't take any advantage of the 2-d nature of the data.\n", "\n", "So, we reshape the data:" ] @@ -210,7 +244,8 @@ "outputs": [], "source": [ "train_x_vectors = train_x.view(-1, num_features)\n", - "test_x_vectors = test_x.view(n_test_examples, -1)" + "test_x_vectors = test_x.view(n_test_examples, -1)\n", + "print(train_x_vectors.shape, test_x_vectors.shape)" ] }, { @@ -235,7 +270,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Oops! Notice that the arrays had integer values, but the result of the division would be floats. One way to change the `dtype` of a torch tensor is using `.to()`.\n", + "Oops! Notice that the arrays had integer values, but the result of the division would be floats. One way to change the `dtype` of a torch tensor is using `.to(torch.dtype)`. Check here for the complete list of supported data types: https://pytorch.org/docs/stable/tensors.html\n", "\n", "Keep in mind that data type is a common source of errors!" ] @@ -265,7 +300,7 @@ "source": [ "train_x_norm = train_x_vectors / 255\n", "test_x_norm = test_x_vectors / 255\n", - "print(train_x_norm[0])" + "print(train_x_norm.max(), train_x_norm.min(), train_x_norm.mean(), train_x_norm.std())" ] }, { @@ -275,6 +310,15 @@ "Now, check the labels:" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(train_y[:20])" + ] + }, { "cell_type": "code", "execution_count": null, @@ -291,7 +335,7 @@ "metadata": {}, "outputs": [], "source": [ - "train_x.shape" + "train_x_norm.shape" ] }, { @@ -318,7 +362,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We begin by loading the data again:" + "We begin by loading, reshaping and normalizing the data again:" ] }, { @@ -357,7 +401,9 @@ "source": [ "## Using Modules\n", "\n", - "Let's create a linear model." + "PyTorch provides some basic building blocks for neural nets under `.nn` module. Here you can check the complete list of available blocks: https://pytorch.org/docs/stable/nn.html\n", + "\n", + "For now, let's recreate a simple linear model using `nn.Linear` (see [doc](https://pytorch.org/docs/stable/generated/torch.nn.Linear.html#torch.nn.Linear))." ] }, { @@ -416,7 +462,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "#### Loss" + "Now that we defined our model, we just have to: \n", + "- define an iterator\n", + "- define and compute the loss\n", + "- compute gradients\n", + "- define the strategy to update the parameters of our model\n", + "- glue previous steps to form the training loop!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Batching\n", + "\n", + "Batching can be boring to code. `DataLoader` helps!" ] }, { @@ -425,14 +485,19 @@ "metadata": {}, "outputs": [], "source": [ - "loss_function = nn.CrossEntropyLoss()" + "from torch.utils.data import DataLoader\n", + "\n", + "train_dataloader = DataLoader(train_dataset, batch_size=64, shuffle=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### Optimizer" + "#### Loss\n", + "\n", + "Here is the complete list of available loss functions: https://pytorch.org/docs/stable/nn.html#loss-functions\n", + "If the provided loss functions don't satisfy your constraints, it is easy to define your own loss function! 
Here is a simple example of how it works" ] }, { @@ -441,19 +506,28 @@ "metadata": {}, "outputs": [], "source": [ - "learning_rate = 0.1\n", - "\n", - "# the optimizer needs to be told which are the parameters to optimize\n", - "optimizer = torch.optim.SGD(linear_model.parameters(), lr=learning_rate, momentum=0.9)" + "with torch.no_grad(): # disable gradient-tracking\n", + " \n", + " dummy_loss = nn.CrossEntropyLoss()\n", + " \n", + " # try other losses!\n", + " # multi-class classification hinge loss (margin-based loss):\n", + " # dummy_loss = nn.MultiMarginLoss() \n", + " \n", + " batch = train_x[:2]\n", + " targets = train_y[:2]\n", + " predictions = linear_model(batch)\n", + " \n", + " print(predictions.shape, targets.shape)\n", + " print(dummy_loss(predictions, targets))\n", + " " ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### Batching\n", - "\n", - "Batching can be boring to code. `DataLoader` helps!" + "We will use the CrossEntropy function as our loss" ] }, { @@ -462,9 +536,40 @@ "metadata": {}, "outputs": [], "source": [ - "from torch.utils.data import DataLoader\n", + "loss_function = nn.CrossEntropyLoss()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Optimizer\n", "\n", - "train_dataloader = DataLoader(train_dataset, batch_size=64, shuffle=True)" + "The optimizer is the object which handles the update of the model's parameters. In the previous exercise, we were using the famous \"delta\" rule to update our weights:\n", + "\n", + "$$W_t = W_{t-1} - \\alpha \\frac{\\partial L}{\\partial W}.$$\n", + "\n", + "But there are more ellaborate ways of updating our parameters: \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "PyTorch provides an extensive list of optimizers: https://pytorch.org/docs/stable/optim.html. Notice that, as everything else, it should be easy to define your own optimizer procedure. \n", + "\n", + "In this lecture we will use the simple yet powerful SGD optmizer. The optimizer needs to be told which are the parameters to optimize." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "learning_rate = 0.1\n", + "parameters = linear_model.parameters() # we will optimize all model's parameters!\n", + "optimizer = torch.optim.SGD(parameters, lr=learning_rate, momentum=0.9)" ] }, { @@ -483,41 +588,51 @@ "outputs": [], "source": [ "def train_model(model, train_dataloader, num_epochs, optimizer):\n", + " # Tell PyTorch that we are in training mode.\n", + " # This is useful for mechanisms that work differently during training and test time, like Dropout. \n", + " model.train()\n", + " \n", " losses = []\n", - "\n", + " \n", " for epoch in range(1, num_epochs+1):\n", " print('Starting epoch %d' % epoch)\n", " total_loss = 0\n", " hits = 0\n", "\n", " for batch_x, batch_y in train_dataloader:\n", + " # Step 1. Remember that PyTorch accumulates gradients.\n", + " # We need to clear them out before each instance\n", " optimizer.zero_grad()\n", - " # get the data for this batch\n", + " \n", + " # Step 2. Get the data for this batch\n", " batch_x = batch_x.reshape(batch_x.shape[0], -1)\n", - " batch_x = batch_x.to(torch.float) / 255\n", + " batch_x = batch_x.to(torch.float) / 255.0\n", "\n", - " # forward pass\n", + " # Step 3. Run forward pass.\n", " logits = model(batch_x)\n", "\n", - " # compute the loss\n", + " # Step 4. Compute loss\n", " loss = loss_function(logits, batch_y)\n", + " \n", + " # Step 5. 
Compute gradeints\n", + " loss.backward()\n", + " \n", + " # Step 6. After determining the gradients, take a step toward their direction\n", + " optimizer.step()\n", + " \n", + " # Optional. Save statistics of your training\n", " loss_value = loss.item()\n", " total_loss += loss_value\n", " losses.append(loss_value)\n", - "\n", " y_pred = logits.argmax(dim=1)\n", - "\n", " hits += torch.sum(y_pred == batch_y).item()\n", "\n", - " loss.backward()\n", - " # after determining the gradients, take a step toward their direction\n", - " optimizer.step()\n", - "\n", " avg_loss = total_loss / len(train_dataloader.dataset)\n", " print('Epoch loss: %.4f' % avg_loss)\n", " acc = hits / len(train_dataloader.dataset)\n", " print('Epoch accuracy: %.4f' % acc)\n", " \n", + " print('Done!')\n", " return np.array(losses)" ] }, @@ -548,6 +663,13 @@ "ax.legend()" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "What can you conclude from this?" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -571,11 +693,13 @@ " linear_layer1 = nn.Linear(n_features, hidden_size)\n", " linear_layer2 = nn.Linear(hidden_size, hidden_size)\n", " linear_layer3 = nn.Linear(hidden_size, n_classes)\n", - " self.feedforward = nn.Sequential(linear_layer1, \n", - " nn.Tanh(), \n", - " linear_layer2,\n", - " nn.Tanh(),\n", - " linear_layer3)\n", + " self.feedforward = nn.Sequential(\n", + " linear_layer1, \n", + " nn.Tanh(), \n", + " linear_layer2, \n", + " nn.Tanh(),\n", + " linear_layer3\n", + " )\n", "\n", " def forward(self, X):\n", " return self.feedforward(X)\n", @@ -639,15 +763,17 @@ "outputs": [], "source": [ "def evaluate_model(model, test_x, test_y):\n", + " # Tell PyTorch that we are in evaluation mode.\n", " model.eval()\n", + "\n", " with torch.no_grad():\n", " loss_function = torch.nn.CrossEntropyLoss()\n", " logits = model(test_x)\n", " loss = loss_function(logits, test_y)\n", - " \n", + "\n", " y_pred = logits.argmax(dim=1)\n", " hits = torch.sum(y_pred == test_y).item()\n", - " model.train()\n", + " \n", " return loss / len(test_x), hits / len(test_x)" ] }, @@ -722,7 +848,8 @@ " linear_layer1,\n", " nn.Tanh(),\n", " nn.Dropout(p_dropout),\n", - " linear_layer2)\n", + " linear_layer2\n", + " )\n", "\n", " def forward(self, X):\n", " return self.feedforward(X)\n", @@ -841,6 +968,15 @@ "![https://twitter.com/karpathy/status/1013244313327681536](img/common_mistakes.png)\n", "https://twitter.com/karpathy/status/1013244313327681536" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Exercises\n", + "\n", + "- Run the MLP example for more epochs" + ] } ], "metadata": { From b1b3129a1c59cd4ade2801df54d2619d6e1b64a9 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:16:04 -0300 Subject: [PATCH 06/18] Update lecture 03 --- 03-modules-and-mlps.ipynb | 3 --- 1 file changed, 3 deletions(-) diff --git a/03-modules-and-mlps.ipynb b/03-modules-and-mlps.ipynb index 150bb6f..b8f0639 100644 --- a/03-modules-and-mlps.ipynb +++ b/03-modules-and-mlps.ipynb @@ -72,9 +72,6 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import torch\n", - "\n", "np.random.seed(0)\n", "torch.manual_seed(0);" ] From 753d2c952e7ebbdf1f64054f27fcbb7de4fe3aad Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:37:01 -0300 Subject: [PATCH 07/18] Add optional lecture 4 --- 04-optional-word2vec.ipynb | 258 +++++++++++++++++++++++++++++++++++++ 1 file changed, 258 insertions(+) create mode 100644 04-optional-word2vec.ipynb diff --git 
a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb new file mode 100644 index 0000000..0c1b4ca --- /dev/null +++ b/04-optional-word2vec.ipynb @@ -0,0 +1,258 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Word2Vec\n", + "\n", + "\"Word2vec is a technique for natural language processing. The word2vec algorithm uses a neural network model to learn word associations from a large corpus of text. Once trained, such a model can detect synonymous words or suggest additional words for a partial sentence. As the name implies, word2vec represents each distinct word with a particular list of numbers called a vector. The vectors are chosen carefully such that a simple mathematical function (the cosine similarity between the vectors) indicates the level of semantic similarity between the words represented by those vectors.\" [https://en.wikipedia.org/wiki/Word2vec]\n", + "\n", + "Here we will build a PyTorch model that implements Word2Vec's CBOW strategy." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import torch.optim as optim" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "from pprint import pprint\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "from IPython.core.debugger import set_trace" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "np.random.seed(0)\n", + "torch.manual_seed(0);" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['We', 'are', 'about', 'to', 'study', 'the', 'idea', 'of', 'a', 'computational', 'process.']\n" + ] + } + ], + "source": [ + "raw_text = \"\"\"We are about to study the idea of a computational process.\n", + "Computational processes are abstract beings that inhabit computers.\n", + "As they evolve, processes manipulate other abstract things called data.\n", + "The evolution of a process is directed by a pattern of rules\n", + "called a program. People create programs to direct processes. 
In effect,\n", + "we conjure the spirits of the computer with our spells.\"\"\".split()\n", + "print(raw_text[:11])" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "49\n", + "{'by': 2, 'we': 3, 'computers.': 4, 'they': 5, 'called': 6, 'computer': 7, 'The': 8, 'is': 9, 'rules': 10, 'a': 11, 'effect,': 12, 'to': 13, 'are': 14, 'things': 15, 'data.': 16, 'our': 17, 'computational': 18, 'direct': 19, 'process.': 20, 'of': 21, 'beings': 22, 'spells.': 23, 'We': 24, 'evolve,': 25, 'directed': 26, 'programs': 27, 'pattern': 28, 'In': 29, 'conjure': 30, 'program.': 31, 'manipulate': 32, 'evolution': 33, 'idea': 34, 'about': 35, 'with': 36, 'abstract': 37, 'inhabit': 38, 'As': 39, 'spirits': 40, 'Computational': 41, 'study': 42, 'process': 43, 'processes': 44, 'People': 45, 'processes.': 46, 'that': 47, 'the': 48, 'create': 49, 'other': 50}\n" + ] + } + ], + "source": [ + "# By deriving a set from `raw_text`, we deduplicate the array\n", + "vocab = set(raw_text)\n", + "vocab_size = len(vocab)\n", + "\n", + "# shifted by 2 due to special tokens for padding and unknown tokens\n", + "word_to_ix = {word: i + 2 for i, word in enumerate(vocab)}\n", + "ix_to_word = list(word_to_ix.values())\n", + "print(vocab_size)\n", + "print(word_to_ix)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(['study', 'to', 'are', 'We'], 'about'), (['the', 'study', 'about', 'are'], 'to'), (['idea', 'the', 'to', 'about'], 'study'), (['of', 'idea', 'study', 'to'], 'the'), (['a', 'of', 'the', 'study'], 'idea')]\n" + ] + } + ], + "source": [ + "context_size = 2 # 2 words to the left, 2 to the right\n", + "data = []\n", + "for i in range(context_size, len(raw_text) - context_size):\n", + " context = [raw_text[i - 2], raw_text[i - 1], raw_text[i + 1], raw_text[i + 2]]\n", + " context = [raw_text[i - j] for j in range(- context_size, context_size + 1) if j != 0]\n", + " target = raw_text[i]\n", + " data.append((context, target))\n", + "print(data[:5])" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(['study', 'to', 'about', 'are', 'We'], 'about'), (['the', 'study', 'to', 'about', 'are'], 'to'), (['idea', 'the', 'study', 'to', 'about'], 'study'), (['of', 'idea', 'the', 'study', 'to'], 'the'), (['a', 'of', 'idea', 'the', 'study'], 'idea')]\n" + ] + }, + { + "data": { + "text/plain": [ + "tensor([40, 11, 33, 12, 22])" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "class CBOW(nn.Module):\n", + "\n", + " def __init__(self, vocab_size, emb_size):\n", + " self.embeddings = nn.Embedding(vocab_size, emb_size)\n", + " self.lin_out = nn.Linear(emb_size, vocab_size)\n", + "\n", + " def forward(self, x):\n", + " # (bs, 4, vocab_size) -> (bs, 4, emb_dim)\n", + " x = self.emb(x)\n", + " # (bs, 4, emb_dim) -> (bs, 4*emb_dim)\n", + " x = x.view(x.shape[0], -1)\n", + " # (bs, 4*emb_dim) -> (bs, vocab_size)\n", + " x = self.lin_out(x)\n", + " return torch.log_softmax(x, dim=-1)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Exercise\n", + "Instantiate the model and write a proper training loop. 
Here are some functions to help you make the data ready for use:" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "def get_list_of_ids(context, word_to_ix):\n", + " list_of_ids = []\n", + " for w in context:\n", + " if w in word_to_ix:\n", + " list_of_ids.append(word_to_ix[w])\n", + " else:\n", + " list_of_ids.append(1) # unknown id = 1\n", + " return list_of_ids\n", + "\n", + "\n", + "def get_target_id(target, word_to_ix):\n", + " target_word_id = 0\n", + " if target in word_to_ix:\n", + " target_word_id = word_to_ix[target]\n", + " return target_word_id\n", + "\n", + "\n", + "def make_context_vector(context, word_to_ix):\n", + " idxs = get_list_of_ids(context, word_to_ix)\n", + " return torch.tensor(idxs, dtype=torch.long)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[42, 13, 14, 24]\n", + "35\n", + "tensor([42, 13, 14, 24])\n" + ] + } + ], + "source": [ + "print(get_list_of_ids(data[0][0], word_to_ix))\n", + "print(get_target_id(data[0][1], word_to_ix))\n", + "print(make_context_vector(data[0][0], word_to_ix))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## More information\n", + "\n", + "If you like, these PyTorch's NLP tutorials are a good place to start building NLP models:\n", + "\n", + "- https://pytorch.org/tutorials/intermediate/seq2seq_translation_tutorial.html\n", + "- https://pytorch.org/tutorials/beginner/transformer_tutorial.html\n", + "- https://pytorch.org/tutorials/intermediate/char_rnn_classification_tutorial.html\n", + "- https://pytorch.org/tutorials/intermediate/char_rnn_generation_tutorial.html\n", + "- https://pytorch.org/tutorials/beginner/text_sentiment_ngrams_tutorial.html" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} From 74106968787c20f25f90eebd905bbca2f0975edf Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:37:17 -0300 Subject: [PATCH 08/18] Add optional lecture 4 --- 04-optional-word2vec.ipynb | 81 +++++++------------------------------- 1 file changed, 14 insertions(+), 67 deletions(-) diff --git a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb index 0c1b4ca..1f1c565 100644 --- a/04-optional-word2vec.ipynb +++ b/04-optional-word2vec.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -25,7 +25,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -38,7 +38,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -48,17 +48,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['We', 'are', 'about', 'to', 'study', 'the', 'idea', 'of', 'a', 'computational', 'process.']\n" - ] - } - ], + "outputs": [], "source": [ "raw_text = \"\"\"We are about to study the idea of a computational 
process.\n", "Computational processes are abstract beings that inhabit computers.\n", @@ -71,20 +63,11 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "49\n", - "{'by': 2, 'we': 3, 'computers.': 4, 'they': 5, 'called': 6, 'computer': 7, 'The': 8, 'is': 9, 'rules': 10, 'a': 11, 'effect,': 12, 'to': 13, 'are': 14, 'things': 15, 'data.': 16, 'our': 17, 'computational': 18, 'direct': 19, 'process.': 20, 'of': 21, 'beings': 22, 'spells.': 23, 'We': 24, 'evolve,': 25, 'directed': 26, 'programs': 27, 'pattern': 28, 'In': 29, 'conjure': 30, 'program.': 31, 'manipulate': 32, 'evolution': 33, 'idea': 34, 'about': 35, 'with': 36, 'abstract': 37, 'inhabit': 38, 'As': 39, 'spirits': 40, 'Computational': 41, 'study': 42, 'process': 43, 'processes': 44, 'People': 45, 'processes.': 46, 'that': 47, 'the': 48, 'create': 49, 'other': 50}\n" - ] - } - ], + "outputs": [], "source": [ "# By deriving a set from `raw_text`, we deduplicate the array\n", "vocab = set(raw_text)\n", @@ -99,17 +82,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[(['study', 'to', 'are', 'We'], 'about'), (['the', 'study', 'about', 'are'], 'to'), (['idea', 'the', 'to', 'about'], 'study'), (['of', 'idea', 'study', 'to'], 'the'), (['a', 'of', 'the', 'study'], 'idea')]\n" - ] - } - ], + "outputs": [], "source": [ "context_size = 2 # 2 words to the left, 2 to the right\n", "data = []\n", @@ -123,27 +98,9 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[(['study', 'to', 'about', 'are', 'We'], 'about'), (['the', 'study', 'to', 'about', 'are'], 'to'), (['idea', 'the', 'study', 'to', 'about'], 'study'), (['of', 'idea', 'the', 'study', 'to'], 'the'), (['a', 'of', 'idea', 'the', 'study'], 'idea')]\n" - ] - }, - { - "data": { - "text/plain": [ - "tensor([40, 11, 33, 12, 22])" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "class CBOW(nn.Module):\n", "\n", @@ -171,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -199,19 +156,9 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[42, 13, 14, 24]\n", - "35\n", - "tensor([42, 13, 14, 24])\n" - ] - } - ], + "outputs": [], "source": [ "print(get_list_of_ids(data[0][0], word_to_ix))\n", "print(get_target_id(data[0][1], word_to_ix))\n", From eb400151a4f058f2020f5053a01d53ca7ee2df84 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:42:34 -0300 Subject: [PATCH 09/18] Update lecture 04 --- 04-optional-word2vec.ipynb | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb index 1f1c565..f887820 100644 --- a/04-optional-word2vec.ipynb +++ b/04-optional-word2vec.ipynb @@ -11,6 +11,13 @@ "Here we will build a PyTorch model that implements Word2Vec's CBOW strategy." 
] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, { "cell_type": "code", "execution_count": null, @@ -111,11 +118,11 @@ " def forward(self, x):\n", " # (bs, 4, vocab_size) -> (bs, 4, emb_dim)\n", " x = self.emb(x)\n", - " # (bs, 4, emb_dim) -> (bs, 4*emb_dim)\n", - " x = x.view(x.shape[0], -1)\n", - " # (bs, 4*emb_dim) -> (bs, vocab_size)\n", - " x = self.lin_out(x)\n", - " return torch.log_softmax(x, dim=-1)\n" + " # (bs, 4, emb_dim) -> (bs, emb_dim)\n", + " x = x.sum(dim=1)\n", + " # (bs, emb_dim) -> (bs, vocab_size)\n", + " logits = self.lin_out(x)\n", + " return logits" ] }, { From 15881f5b724bc91f57214adc582a98ca2c944e0b Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:46:16 -0300 Subject: [PATCH 10/18] Update lecture 04 --- 04-optional-word2vec.ipynb | 105 +++++++++++++++++++++++-------------- 1 file changed, 67 insertions(+), 38 deletions(-) diff --git a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb index f887820..8cc067f 100644 --- a/04-optional-word2vec.ipynb +++ b/04-optional-word2vec.ipynb @@ -58,6 +58,44 @@ "execution_count": null, "metadata": {}, "outputs": [], + "source": [ + "class CBOW(nn.Module):\n", + "\n", + " def __init__(self, vocab_size, emb_size):\n", + " self.embeddings = nn.Embedding(vocab_size, emb_size)\n", + " self.lin_out = nn.Linear(emb_size, vocab_size)\n", + "\n", + " def forward(self, x):\n", + " # (bs, context_size, vocab_size) -> (bs, context_size, emb_dim)\n", + " x = self.emb(x)\n", + " # (bs, context_size, emb_dim) -> (bs, emb_dim)\n", + " x = x.sum(dim=1)\n", + " # (bs, emb_dim) -> (bs, vocab_size)\n", + " logits = self.lin_out(x)\n", + " return logits" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Exercise\n", + "Instantiate the model and write a proper training loop. 
Here are some functions to help you make the data ready for use:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['We', 'are', 'about', 'to', 'study', 'the', 'idea', 'of', 'a', 'computational', 'process.']\n" + ] + } + ], "source": [ "raw_text = \"\"\"We are about to study the idea of a computational process.\n", "Computational processes are abstract beings that inhabit computers.\n", @@ -70,67 +108,58 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "49\n", + "{'inhabit': 2, 'of': 3, 'beings': 4, 'evolve,': 5, 'People': 6, 'manipulate': 7, 'computers.': 8, 'that': 9, 'computational': 10, 'directed': 11, 'rules': 12, 'to': 13, 'process.': 14, 'programs': 15, 'study': 16, 'effect,': 17, 'conjure': 18, 'we': 19, 'As': 20, 'data.': 21, 'called': 22, 'by': 23, 'In': 24, 'spells.': 25, 'about': 26, 'evolution': 27, 'program.': 28, 'processes.': 29, 'create': 30, 'Computational': 31, 'our': 32, 'the': 33, 'things': 34, 'computer': 35, 'direct': 36, 'The': 37, 'are': 38, 'a': 39, 'spirits': 40, 'with': 41, 'they': 42, 'other': 43, 'process': 44, 'processes': 45, 'is': 46, 'idea': 47, 'We': 48, 'pattern': 49, 'abstract': 50}\n" + ] + } + ], "source": [ - "# By deriving a set from `raw_text`, we deduplicate the array\n", "vocab = set(raw_text)\n", "vocab_size = len(vocab)\n", "\n", "# shifted by 2 due to special tokens for padding and unknown tokens\n", "word_to_ix = {word: i + 2 for i, word in enumerate(vocab)}\n", "ix_to_word = list(word_to_ix.values())\n", + "\n", "print(vocab_size)\n", "print(word_to_ix)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(['We', 'are', 'to', 'study'], 'about')\n", + "(['are', 'about', 'study', 'the'], 'to')\n", + "(['about', 'to', 'the', 'idea'], 'study')\n" + ] + } + ], "source": [ - "context_size = 2 # 2 words to the left, 2 to the right\n", + "context_size = 2 # 2 words to the left and 2 to the right\n", "data = []\n", "for i in range(context_size, len(raw_text) - context_size):\n", - " context = [raw_text[i - 2], raw_text[i - 1], raw_text[i + 1], raw_text[i + 2]]\n", - " context = [raw_text[i - j] for j in range(- context_size, context_size + 1) if j != 0]\n", + " context = [raw_text[i + j] for j in range(- context_size, context_size + 1) if j != 0]\n", " target = raw_text[i]\n", " data.append((context, target))\n", - "print(data[:5])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "class CBOW(nn.Module):\n", - "\n", - " def __init__(self, vocab_size, emb_size):\n", - " self.embeddings = nn.Embedding(vocab_size, emb_size)\n", - " self.lin_out = nn.Linear(emb_size, vocab_size)\n", "\n", - " def forward(self, x):\n", - " # (bs, 4, vocab_size) -> (bs, 4, emb_dim)\n", - " x = self.emb(x)\n", - " # (bs, 4, emb_dim) -> (bs, emb_dim)\n", - " x = x.sum(dim=1)\n", - " # (bs, emb_dim) -> (bs, vocab_size)\n", - " logits = self.lin_out(x)\n", - " return logits" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Exercise\n", - "Instantiate the model and write a proper training loop. 
Here are some functions to help you make the data ready for use:" + "print(data[0])\n", + "print(data[1])\n", + "print(data[2])" ] }, { From da72526f75611ccbd0ba6a067cbbc289357df0f3 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 06:46:31 -0300 Subject: [PATCH 11/18] Update lecture 04 --- 04-optional-word2vec.ipynb | 39 ++++++-------------------------------- 1 file changed, 6 insertions(+), 33 deletions(-) diff --git a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb index 8cc067f..12ff090 100644 --- a/04-optional-word2vec.ipynb +++ b/04-optional-word2vec.ipynb @@ -85,17 +85,9 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['We', 'are', 'about', 'to', 'study', 'the', 'idea', 'of', 'a', 'computational', 'process.']\n" - ] - } - ], + "outputs": [], "source": [ "raw_text = \"\"\"We are about to study the idea of a computational process.\n", "Computational processes are abstract beings that inhabit computers.\n", @@ -108,20 +100,11 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "49\n", - "{'inhabit': 2, 'of': 3, 'beings': 4, 'evolve,': 5, 'People': 6, 'manipulate': 7, 'computers.': 8, 'that': 9, 'computational': 10, 'directed': 11, 'rules': 12, 'to': 13, 'process.': 14, 'programs': 15, 'study': 16, 'effect,': 17, 'conjure': 18, 'we': 19, 'As': 20, 'data.': 21, 'called': 22, 'by': 23, 'In': 24, 'spells.': 25, 'about': 26, 'evolution': 27, 'program.': 28, 'processes.': 29, 'create': 30, 'Computational': 31, 'our': 32, 'the': 33, 'things': 34, 'computer': 35, 'direct': 36, 'The': 37, 'are': 38, 'a': 39, 'spirits': 40, 'with': 41, 'they': 42, 'other': 43, 'process': 44, 'processes': 45, 'is': 46, 'idea': 47, 'We': 48, 'pattern': 49, 'abstract': 50}\n" - ] - } - ], + "outputs": [], "source": [ "vocab = set(raw_text)\n", "vocab_size = len(vocab)\n", @@ -136,19 +119,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(['We', 'are', 'to', 'study'], 'about')\n", - "(['are', 'about', 'study', 'the'], 'to')\n", - "(['about', 'to', 'the', 'idea'], 'study')\n" - ] - } - ], + "outputs": [], "source": [ "context_size = 2 # 2 words to the left and 2 to the right\n", "data = []\n", From afc09b55f95b18ac1945b5f4e826d7fd8b6fa0ed Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 14 Oct 2020 11:57:59 -0300 Subject: [PATCH 12/18] Update lecture 04 --- 04-optional-word2vec.ipynb | 85 +++++++++++++++++++++++++++++--------- 1 file changed, 66 insertions(+), 19 deletions(-) diff --git a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb index 12ff090..71fa126 100644 --- a/04-optional-word2vec.ipynb +++ b/04-optional-word2vec.ipynb @@ -20,7 +20,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -32,7 +32,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -45,7 +45,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -66,12 +66,15 @@ " self.lin_out = nn.Linear(emb_size, vocab_size)\n", "\n", " def forward(self, x):\n", - " # (bs, context_size, vocab_size) -> 
(bs, context_size, emb_dim)\n", + " # (bs, context_size) -> (bs, context_size, emb_dim)\n", " x = self.emb(x)\n", + " \n", " # (bs, context_size, emb_dim) -> (bs, emb_dim)\n", " x = x.sum(dim=1)\n", + "\n", " # (bs, emb_dim) -> (bs, vocab_size)\n", " logits = self.lin_out(x)\n", + "\n", " return logits" ] }, @@ -85,9 +88,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['We', 'are', 'about', 'to', 'study', 'the', 'idea', 'of', 'a', 'computational', 'process.']\n" + ] + } + ], "source": [ "raw_text = \"\"\"We are about to study the idea of a computational process.\n", "Computational processes are abstract beings that inhabit computers.\n", @@ -100,28 +111,54 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": { "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "51\n", + "{'': 0, '': 1, 'they': 2, 'study': 3, 'about': 4, 'the': 5, 'beings': 6, 'by': 7, 'effect,': 8, 'to': 9, 'our': 10, 'spells.': 11, 'The': 12, 'process.': 13, 'processes.': 14, 'are': 15, 'evolve,': 16, 'rules': 17, 'a': 18, 'things': 19, 'People': 20, 'is': 21, 'computers.': 22, 'Computational': 23, 'we': 24, 'computer': 25, 'evolution': 26, 'manipulate': 27, 'As': 28, 'program.': 29, 'of': 30, 'processes': 31, 'inhabit': 32, 'We': 33, 'abstract': 34, 'direct': 35, 'with': 36, 'programs': 37, 'conjure': 38, 'In': 39, 'directed': 40, 'other': 41, 'process': 42, 'data.': 43, 'create': 44, 'pattern': 45, 'idea': 46, 'computational': 47, 'that': 48, 'called': 49, 'spirits': 50}\n", + "they\n" + ] + } + ], "source": [ "vocab = set(raw_text)\n", - "vocab_size = len(vocab)\n", + "\n", "\n", "# shifted by 2 due to special tokens for padding and unknown tokens\n", - "word_to_ix = {word: i + 2 for i, word in enumerate(vocab)}\n", - "ix_to_word = list(word_to_ix.values())\n", + "word_to_ix = {}\n", + "word_to_ix[''] = 0\n", + "word_to_ix[''] = 1\n", + "for i, word in enumerate(vocab):\n", + " word_to_ix[word] = i + 2\n", + "ix_to_word = list(word_to_ix.keys())\n", + "vocab_size = len(word_to_ix)\n", "\n", "print(vocab_size)\n", - "print(word_to_ix)" + "print(word_to_ix)\n", + "print(ix_to_word[2])" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(['We', 'are', 'to', 'study'], 'about')\n", + "(['are', 'about', 'study', 'the'], 'to')\n", + "(['about', 'to', 'the', 'idea'], 'study')\n" + ] + } + ], "source": [ "context_size = 2 # 2 words to the left and 2 to the right\n", "data = []\n", @@ -137,7 +174,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -165,9 +202,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[33, 15, 9, 3]\n", + "4\n", + "tensor([33, 15, 9, 3])\n" + ] + } + ], "source": [ "print(get_list_of_ids(data[0][0], word_to_ix))\n", "print(get_target_id(data[0][1], word_to_ix))\n", @@ -183,10 +230,10 @@ "If you like, these PyTorch's NLP tutorials are a good place to start building NLP models:\n", "\n", "- https://pytorch.org/tutorials/intermediate/seq2seq_translation_tutorial.html\n", - "- 
https://pytorch.org/tutorials/beginner/transformer_tutorial.html\n", "- https://pytorch.org/tutorials/intermediate/char_rnn_classification_tutorial.html\n", "- https://pytorch.org/tutorials/intermediate/char_rnn_generation_tutorial.html\n", - "- https://pytorch.org/tutorials/beginner/text_sentiment_ngrams_tutorial.html" + "- https://pytorch.org/tutorials/beginner/text_sentiment_ngrams_tutorial.html\n", + "- https://pytorch.org/tutorials/beginner/transformer_tutorial.html" ] } ], From a50e0f3578e809d823a53ad0f875c7ae29673eec Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Wed, 4 May 2022 07:41:45 +0100 Subject: [PATCH 13/18] Update files --- 00-intro.ipynb | 217 ++++-- 01-pytorch-basics.ipynb | 700 +++++++++++++++---- 02-linear-regression.ipynb | 388 ++++++++--- 03-modules-and-mlps.ipynb | 341 ++++------ 04-optional-word2vec.ipynb | 367 ++++++---- README.md | 13 +- bonus-computational-efficiency.ipynb | 314 ++++----- challenges-for-true-pytorch-heroes.ipynb | 817 +++++++++++++++++++++++ requirements.txt | 6 + spec.py | 182 +++++ 10 files changed, 2586 insertions(+), 759 deletions(-) create mode 100644 challenges-for-true-pytorch-heroes.ipynb create mode 100644 spec.py diff --git a/00-intro.ipynb b/00-intro.ipynb index 7262673..a07547f 100644 --- a/00-intro.ipynb +++ b/00-intro.ipynb @@ -6,49 +6,172 @@ "source": [ "# Introduction\n", "\n", - "Material for this lecture is here: https://github.com/mtreviso/pytorch-lecture\n", + "The material for this course is here: https://github.com/mtreviso/pytorch-lecture. \n", "\n", - "**Note:**\n", - "If you use PyTorch on a daily basis, you will most probably not learn a lot during this lecture.\n", + "
\n", + "
\n", + " What we are NOT going to cover in this course:
\n", + " How to implement SOTA models\n", + "
\n", + " How to optimize our code\n", + "
\n", + " How autograd is implemented\n", + "
\n", + " How to use the new fancy stuff: mobile support, distributed training, quantization, sparse tensors, etc.\n", + "

\n", + " Instead, we are going to:
\n", + " Understand the key PyTorch concepts (e.g., tensors, modules, autograd, broadcasting, ...)\n", + "
\n", + " Understand what PyTorch can and cannot do\n", + "
\n", + " Create simple neural networks and get and idea of how we can implement more complex models in the future\n", + "
\n", + " Kick off with PyTorch 🚀\n", + "
\n", "\n", - "**Goals:**\n", - "- understand PyTorch concepts (e.g. autograd, broadcasting, ...) and understand what it can and cannot do\n", - "- be aware of some handy tools/libraries\n", - "- be able to create simple neural networks\n", - "- learn some tools that will help to code more complex models in the future" + "> If you use PyTorch on a daily basis, you will most probably not learn a lot during this lecture." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# PyTorch Overview\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Quick Recap of Jupyter Notebooks\n", "\n", + "A jupyter notebook document has the `.ipynb` extension and is composed of a number of cells. In cells, you can write program code in Python and create notes in markdown style. These three types of cells correspond to:\n", + " \n", + " 1. code\n", + " 2. markdown\n", + " 3. raw\n", + " \n", + "To work with the contents of a cell, use *Edit mode* (turns on by pressing **Enter** after selecting a cell), and to navigate between cells, use *command mode* (turns on by pressing **Esc**).\n", + "\n", + "The cell type can be set in command mode either using hotkeys (**y** to code, **m** to markdown, **r** to edit raw text), or in the menu *Cell -> Cell type* ... " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Example" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# cell with code\n", + "a = 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "a = 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "a\n", + "print(a)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Cell with markdown text" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "Cell with raw text" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, press `Shift + Enter` to process the contents of the cell:\n", + "interpret the code or lay out the marked-up text." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Basic shortcuts\n", "\n", - "> \"PyTorch - An open source machine learning framework that accelerates the path from research prototyping to production deployment.\"\n", - ">\n", - "> -- https://pytorch.org/\n", + "- `a` creates a cell above the current cell\n", + "- `b` creates a cell below the current cell\n", + "- `dd` deletes the curent cell\n", + "- `Enter` enters in edit mode\n", + "- `Esc` exits edit mode\n", + "- `Ctrl` + `Enter` runs the cell\n", + "- `Shift` + `Enter` runs the cell and creates (or jumps to) a next one\n", + "- `m` converts the current cell to markdown\n", + "- `y` converts the current cell to code" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "> ***Word of caution***
\n", + "> Jupyter-notebook is a great tool for data science since we can see the direct effect of a snippet of code, either by plotting the result or by inspecting the direct output. However, we should be careful with the order in which we run cells (this is a common source of errors).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PyTorch Overview\n", "\n", - "This was the tagline prior to PyTorch 1.0.\n", - "Now it's:\n", "\n", "> \"PyTorch - From Research To Production\n", "> \n", - "> An open source deep learning platform that provides a seamless path from research prototyping to production deployment.\"" + "> An open source machine learning framework that accelerates the path from research prototyping to production deployment.\"\n", + "> -- https://pytorch.org/" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## \"Build by run\" - what is that and why do I care?" + "## \"Build by run\" - what is that and why do I care?\n", + "\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "![](img/dynamic_graph.gif)" + "" ] }, { @@ -65,14 +188,14 @@ "outputs": [], "source": [ "import torch\n", - "from IPython.core.debugger import set_trace\n", + "import ipdb\n", "\n", "def f(x):\n", " res = x + x\n", - " set_trace() # <-- :o\n", + " ipdb.set_trace() # <-- :o\n", " return res\n", "\n", - "x = torch.randn(1, 10)\n", + "x = torch.randn(1, 8)\n", "f(x)" ] }, @@ -80,23 +203,28 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## TensorFlow and PyTorch\n", - "\n", - "- static vs dynamic\n", - "- production vs research\n", - "\n", - "https://thegradient.pub/state-of-ml-frameworks-2019-pytorch-dominates-research-tensorflow-dominates-industry/\n", - "\n", - "> *(...) researchers are abandoning TensorFlow and flocking to PyTorch in droves. Meanwhile in industry, Tensorflow is currently the platform of choice (...)*\n", - "\n", - "> *In 2018, PyTorch was a minority. Now, it is an overwhelming majority, with 69% of CVPR using PyTorch, 75+% of both NAACL and ACL, and 50+% of ICLR and ICML. While PyTorch’s dominance is strongest at vision and language conferences (outnumbering TensorFlow by 2:1 and 3:1 respectively), PyTorch is also more popular than TensorFlow at general machine learning conferences like ICLR and ICML.*" + "## Other reasons for using PyTorch\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "![](img/the_real_reason.png)" + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "- Seamless GPU integration\n", + "- Production ready\n", + "- Distributed training\n", + "- Mobile support\n", + "- Cloud support\n", + "- Robust ecosystem\n", + "- C++ front-end\n" ] }, { @@ -105,6 +233,7 @@ "source": [ "## Other neural network toolkits you might want to check out\n", "- TensorFlow\n", + "- JAX\n", "- MXNet\n", "- Keras\n", "- CNTK\n", @@ -114,7 +243,14 @@ "- dynet\n", "- many many more\n", "\n", - "All of them are good!\n" + "Which one to choose? There is no bullet silver. 
All of them are good!\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---" ] }, { @@ -132,28 +268,17 @@ "- PyTorch Text: https://github.com/pytorch/text\n", "- PyTorch Audio: https://github.com/pytorch/audio\n", "\n", - "## Tutorials I based this on...\n", "\n", + "More tutorials:\n", "- https://github.com/sotte/pytorch_tutorial\n", "- https://github.com/erickrf/pytorch-lecture\n", "- https://github.com/goncalomcorreia/pytorch-lecture" ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Some Jupyter Notebook shortcuts that might be useful:\n", - "\n", - "- `A` and `B` create a cell above and below, respectively\n", - "- `Shift+Enter` runs the cell and jumps to the next one/creates one below\n", - "- `D`, `D` deletes the cell" - ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -167,7 +292,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/01-pytorch-basics.ipynb b/01-pytorch-basics.ipynb index 573c3c4..66d9b48 100644 --- a/01-pytorch-basics.ipynb +++ b/01-pytorch-basics.ipynb @@ -4,26 +4,18 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# An introduction to Pytorch\n", + "# An introduction to PyTorch\n", "\n", - "Pytorch is a platform for deep learning in Python/C++. In this lecture we will focus in the Python landscape. \n", - "\n", - "It provides tools for efficiently creating, training, testing and analyzing neural networks:\n", - "\n", - "* Different types of layers (embedding, linear, convolutional, recurrent)\n", - "* Activation functions (tanh, relu, sigmoid, etc.)\n", - "* Gradient computation\n", - "* Optimizer (adam, adagrad, RMSprop, SGD, etc.)\n", - "* Implementations speed gains in GPU" + "PyTorch is a platform for deep learning in Python or C++. In this lecture we will focus in the **Python** landscape. " ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Tensors\n", + "# Tensors\n", "\n", - "Let's start with some basics: tensors are similar to numpy arrays" + "Tensors are elementary units of PyTorch. 
They are very similar to numpy arrays" ] }, { @@ -33,9 +25,9 @@ "outputs": [], "source": [ "import numpy as np\n", - "import torch\n", - "\n", "np.random.seed(0)\n", + "\n", + "import torch\n", "torch.manual_seed(0)" ] }, @@ -45,12 +37,8 @@ "metadata": {}, "outputs": [], "source": [ - "v1 = np.arange(10)\n", - "v2 = np.arange(10, 20)\n", - "\n", - "print(\"v1: %s\\n\" % v1)\n", - "print(\"v2: %s\\n\" % v2)\n", - "print(\"Dot product: %d\" % v1.dot(v2))" + "x = np.array([1.0, 2.0, 3.0])\n", + "y = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)" ] }, { @@ -59,34 +47,136 @@ "metadata": {}, "outputs": [], "source": [ - "v1 = torch.arange(10)\n", - "v2 = torch.arange(10, 20)\n", - "\n", - "print(\"v1: %s\\n\" % v1)\n", - "print(\"v2: %s\\n\" % v2)\n", - "print(\"Dot product: %d\" % v1.dot(v2))" + "x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "z = y ** 2\n", + "z" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### Setting values manually or randomly:" + "Broadly speaking, a tensor is like a numpy array that can carry gradient information from the chain of operations applied on top of it. There are other flavors that make them different, but this is the key distinction." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Creating tensors " ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": true - }, + "metadata": {}, "outputs": [], "source": [ - "v3 = np.array([2, 4, 6, 8])\n", - "v4 = np.random.random(10)\n", + "# directly from data\n", + "data = [[0, 1], [1, 0]]\n", + "x_data = torch.tensor(data)\n", + "x_data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# from a numpy array\n", + "x_numpy = np.array([[1, 2], [3, 4]])\n", + "x_torch = torch.from_numpy(x_numpy)\n", + "x_torch" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# convert it back to a numpy array\n", + "x_numpy = x_torch.numpy()\n", + "x_numpy" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# with constant data\n", + "x = torch.ones(2, 3) # 2 rows and 3 columns\n", + "print(x)\n", + "y = torch.zeros(3, 2) # 3 rows and 2 columns\n", + "print(y)\n", + "z = torch.full((3, 1), -5) # 3 row and 1 columns (aka column vector)\n", + "print(z)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# with random data\n", + "x = torch.rand(2, 3) # uniform distribution U(0, 1)\n", + "print(x)\n", + "y = torch.randn(2, 3) # standard gaussian N(0, 1)\n", + "print(y)\n", + "z = torch.randint(0, 10, size=(2, 3)) # random integers [0, 10)\n", + "print(z)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# other initializations\n", + "print(torch.arange(5)) # from 0 (inclusive) to 5 (exclusive)\n", + "print(torch.arange(2, 8)) # from 2 to 8\n", + "print(torch.arange(2, 8, 2)) # from 2 to 8, with stepsize=2\n", "\n", - "print(\"v3: %s\\n\" % v3)\n", - "print(\"v4: %s\\n\" % v4)" + "print(torch.linspace(0, 1, 6)) # returns 6 linear spaced numbers from 0 to 1 (inclusive)\n", + "print(torch.linspace(-1, 1, 8)) # returns 8 linear spaced numbers form -1 to 1 \n", + 
"\n", + "print(torch.eye(3)) # identity matrix" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "See the full set of creation ops [here](https://pytorch.org/docs/stable/torch.html#creation-ops)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Tensor attributes" ] }, { @@ -95,18 +185,92 @@ "metadata": {}, "outputs": [], "source": [ - "v3 = torch.tensor([2, 4, 6, 8])\n", - "v4 = torch.rand(10)\n", + "x = torch.rand(3, 4, requires_grad=True)\n", + "print(x.device)\n", + "print(x.shape)\n", + "print(x.dtype)\n", + "print(x)\n", + "print(x.data)\n", + "print(x[0, 0])\n", + "print(x[0, 0].item())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Tensor data types:\n", "\n", - "print(\"v3: %s\\n\" % v3)\n", - "print(\"v4: %s\\n\" % v4)" + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "

Data type

dtype

Legacy Constructors

32-bit floating point

torch.float32 or torch.float

torch.*.FloatTensor

64-bit floating point

torch.float64 or torch.double

torch.*.DoubleTensor

64-bit complex

torch.complex64 or torch.cfloat

128-bit complex

torch.complex128 or torch.cdouble

16-bit floating point 1

torch.float16 or torch.half

torch.*.HalfTensor

16-bit floating point 2

torch.bfloat16

torch.*.BFloat16Tensor

8-bit integer (unsigned)

torch.uint8

torch.*.ByteTensor

8-bit integer (signed)

torch.int8

torch.*.CharTensor

16-bit integer (signed)

torch.int16 or torch.short

torch.*.ShortTensor

32-bit integer (signed)

torch.int32 or torch.int

torch.*.IntTensor

64-bit integer (signed)

torch.int64 or torch.long

torch.*.LongTensor

Boolean

torch.bool

torch.*.BoolTensor

\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### You can also change a value inside the array manually" + "Casting tensors accoding to regular Python rules:\n", + "```\n", + "complex > floating > integral > boolean\n", + "```\n", + "\n", + "Also, be careful with casts to the same dtypes to avoid underflow/overflow:" ] }, { @@ -115,22 +279,43 @@ "metadata": {}, "outputs": [], "source": [ - "v4[1] = 0.1\n", - "print(v4)" + "float_tensor = torch.randn(2, 2, dtype=torch.float)\n", + "int_tensor = torch.ones(1, dtype=torch.int)\n", + "long_tensor = torch.ones(1, dtype=torch.long)\n", + "uint_tensor = torch.ones(1, dtype=torch.uint8)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "long_tensor_big_number = long_tensor * 2**33\n", + "long_tensor_big_number, long_tensor_big_number.int()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "float_tensor, float_tensor.long()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### Accessing values (indexing)" + "See the full list of attributes [here](https://pytorch.org/docs/stable/tensor_attributes.html)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Individual tensor positions are scalars, or 0-dimension tensor:" + "## Examples" ] }, { @@ -139,7 +324,84 @@ "metadata": {}, "outputs": [], "source": [ - "v1 = torch.arange(10)" + "# scalar\n", + "x = torch.tensor(2)\n", + "print(x)\n", + "print(x.shape)\n", + "print(x.item()) # access the (single) element inside the tensor\n", + "print('')\n", + "\n", + "# vector\n", + "x = torch.rand(4)\n", + "print(x)\n", + "print(x.shape)\n", + "print('')\n", + "\n", + "# matrix\n", + "x = torch.rand(4, 3)\n", + "print(x)\n", + "print(x.shape)\n", + "print('')\n", + "\n", + "# n-dimensional array\n", + "x = torch.rand(3, 4, 3) # e.g., image with width=3, height=4, and channels=3\n", + "print(x)\n", + "print(x.shape)\n", + "print('')\n", + "\n", + "from matplotlib import pyplot as plt; plt.imshow(x)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Tensor operations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v1 = torch.arange(8)\n", + "v2 = torch.arange(10, 18)\n", + "\n", + "print(\"v1: %s\" % v1)\n", + "print(\"v2: %s\" % v2)\n", + "print(\"Dot product: %d\" % v1.dot(v2))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### You can also change a value inside the array manually" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v2[1] = 25\n", + "print(v2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Accessing values:**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Individual tensor positions are scalars, or 0-dimension tensor:" ] }, { @@ -174,7 +436,37 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Converting" + "**Numpy-style indexing:**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m = torch.randn(3, 4, 3)\n", + "m" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "m[0,1,0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m[:, 1, 0]" ] }, { @@ 
-183,8 +475,7 @@ "metadata": {}, "outputs": [], "source": [ - "A = torch.eye(3)\n", - "A" + "m[0, :, -1]" ] }, { @@ -193,9 +484,7 @@ "metadata": {}, "outputs": [], "source": [ - "# torch --> numpy\n", - "B = A.numpy()\n", - "B" + "m[:, :, -1]" ] }, { @@ -204,8 +493,7 @@ "metadata": {}, "outputs": [], "source": [ - "# numpy --> torch\n", - "torch.from_numpy(np.eye(3))" + "m[..., -1]" ] }, { @@ -316,7 +604,85 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "#### Matrices" + "## Aggregating tensors" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "(x ** 2).sum().sqrt()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "x.mean(), x.std()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "x.min(), x.max()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "x.norm(p=3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Joining tensors" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "torch.cat([x, y])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "z = torch.stack([x, y])\n", + "z" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "torch.vstack([z, x])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Tensor multiplication" ] }, { @@ -337,7 +703,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Oops... that can be misleading if you are used to numpy. Instead, call `mm`" + "Oops... that can be misleading if you are used to numpy. In PyTorch, `dot` is reserved for vectors only.\n", + "For matrices, call `mm`:" ] }, { @@ -351,6 +718,13 @@ "print(m1.mm(m2))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Or the now-default-python operator for matrix multiplication `@`" + ] + }, { "cell_type": "code", "execution_count": null, @@ -364,7 +738,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "What if I have batched data? It's better to use `.bmm()`! This is a common source of errors." + "What if I have batched data? It's better to use `.bmm()` (this is a common source of error)" ] }, { @@ -420,7 +794,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "`.bmm` works with 3d tensors. We can use the more general `matmul` instead. In fact, the `@` operator is a shorthand for `matmul`." + "`.bmm` works only with 3d tensors. For higher dimensionalities, we can use the more general `matmul`. In fact, the `@` operator is a shorthand for `matmul` (which is implemented in the magic method `__matmul__` )" ] }, { @@ -493,9 +867,7 @@ "outputs": [], "source": [ "print(\"m:\", m)\n", - "print()\n", - "print(\"v:\", v)\n", - "print()" + "print(\"v:\", v)" ] }, { @@ -611,9 +983,92 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### General Broadcast Semantics\n", + "## Squeezing and Unsqueezing\n", "\n", - "See more here: https://pytorch.org/docs/master/notes/broadcasting.html" + "Broadcasting is one of the most important concepts for manipulating n-dimensional arrays. PyTorch offers some ways of expanding the rank of a tensor. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v = torch.rand(4).view(1, 4, 1)\n", + "print(v)\n", + "print(v.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v.squeeze().shape # \"compress\" all single-dimensions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v.squeeze(0).shape # \"compress\" only the (0-indexed) single-dimension" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v.unsqueeze(1).shape # \"add\" a new dimension BEFORE the (1-indexed) dimension" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# using numpy notation (better since it explicitily says where a new dimension is being created)\n", + "v[:, None].shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v.unsqueeze(1).unsqueeze(-1).unsqueeze(1).shape # what unsqueeze(1).unsqueeze(1) does?" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "v[:, None, None, ..., None].shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# we can also use .view(dims) as long te specified dims are valid\n", + "v.view(1, 1, 1, 4, 1, 1).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## General Broadcast Semantics" ] }, { @@ -647,6 +1102,7 @@ "source": [ "x = torch.rand((0,))\n", "y = torch.rand(2,2)\n", + "print(x.shape)\n", "z = x + y\n", "# x and y are not broadcastable, because x does not have at least 1 dimension" ] @@ -685,9 +1141,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Always take care with tensor shapes! It is a good practice to verify in the interpreter how some expression is evaluated before inserting into your model code. \n", + "Always take care of tensor shapes! It is a good practice to debug how some expression is evaluated before inserting adding it to your codebase. \n", "\n", - "In other words, **you can use pytorch's dynamic graph creation ability to debug your model by printing tensor shapes!**" + "\n", + "\n", + "See more here: https://pytorch.org/docs/master/notes/broadcasting.html" ] }, { @@ -705,10 +1163,7 @@ "metadata": {}, "outputs": [], "source": [ - "%matplotlib inline\n", - "\n", - "import matplotlib\n", - "import matplotlib.pyplot as pl" + "import matplotlib.pyplot as plt" ] }, { @@ -736,13 +1191,6 @@ "x.shape" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `.numpy()` method converts Pytorch tensors to numpy array. It is necessary to plot with matplotlib." 
- ] - }, { "cell_type": "code", "execution_count": null, @@ -750,14 +1198,7 @@ "outputs": [], "source": [ "y = x.sin()\n", - "pl.plot(x.numpy(), y.numpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Hyperbolic tangent" + "plt.plot(x.numpy(), y.numpy())" ] }, { @@ -767,14 +1208,7 @@ "outputs": [], "source": [ "y = x.tanh()\n", - "pl.plot(x.numpy(), y.numpy())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "$e^x$ " + "plt.plot(x.numpy(), y.numpy())" ] }, { @@ -784,7 +1218,7 @@ "outputs": [], "source": [ "y = x.exp()\n", - "pl.plot(x.numpy(), y.numpy())" + "plt.plot(x.numpy(), y.numpy())" ] }, { @@ -868,7 +1302,10 @@ "outputs": [], "source": [ "# you can get a tensor's device via the .device attribute\n", - "res.device" + "res.device\n", + "z = torch.arange(10)\n", + "z = z.to(res.device)\n", + "print(z.device)" ] }, { @@ -879,18 +1316,14 @@ "\n", "Central to all neural networks in PyTorch is the `autograd` package. \n", "\n", - "We can say that it is the _true_ power behind PyTorch. The autograd package provides automatic differentiation for all operations on Tensors. It is a **define-by-run** framework, which means that your backprop is defined by how your code is run, and that **every single iteration can be different**.\n", - "\n", - "Refs:\n", - "- https://pytorch.org/docs/stable/autograd.html\n", - "- https://pytorch.org/tutorials/beginner/blitz/autograd_tutorial.html" + "We can say that it is the _true_ power behind PyTorch. The autograd package provides automatic differentiation for all operations on Tensors. It is a **define-by-run** framework, which means that your backprop is defined by how your code is run, and that **every single iteration can be different**." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "`torch.Tensor` is the central class of the package. If you set its attribute `.requires_grad` as `True`, it starts to track all operations on it. When you finish your computation you can call `.backward()` and have all the gradients computed automatically. The gradient for this tensor will be accumulated into `.grad` attribute." + "`torch.Tensor` is the central class of the package. If you set its attribute `.requires_grad` as `True`, it starts to track all operations applied on it. When you finish your computation you can call `.backward()` and have all the gradients computed automatically. The gradient for this tensor will be accumulated into the `.grad` attribute." ] }, { @@ -985,7 +1418,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "To prevent tracking history (and using memory), you can also wrap the code block in with `torch.no_grad()`:. This can be particularly helpful when evaluating a model because the model may have trainable parameters with `requires_grad=True`, but for which we don’t need the gradients." + "To prevent tracking history (and using memory), you can also wrap the code block in with `torch.no_grad()`: This can be particularly helpful when evaluating a model because the model may have trainable parameters with `requires_grad=True`, but for which we don’t need the gradients." ] }, { @@ -1015,7 +1448,36 @@ "\n", "`Tensor` and `Function` are interconnected and build up an acyclic graph, that encodes a complete history of computation. 
Each tensor has a `.grad_fn` attribute that references a `Function` that has created the `Tensor` (except for `Tensor`s created by the user - their `grad_fn` is `None`).\n", "\n", - "====> Let's go back and see the `grad_fn` in our previous examples." + "Let's go back and see the `grad_fn` in our previous example:\n", + "```\n", + "input -> x -> Pow(2) -> y -> Exp() -> Mul(constant) -> output\n", + "```\n", + "\n", + "We can create a `Function` and manually define its gradient (this is particularly useful for originally non-differentiable operations)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "class Exp(torch.autograd.Function):\n", + " @staticmethod\n", + " def forward(ctx, i):\n", + " result = i.exp()\n", + " ctx.save_for_backward(result)\n", + " return result\n", + " \n", + " @staticmethod\n", + " def backward(ctx, grad_output):\n", + " result, = ctx.saved_tensors\n", + " return grad_output * result\n", + "\n", + "# Use it by calling the apply method:\n", + "x = torch.arange(4)\n", + "output = Exp.apply(x)\n", + "output" ] }, { @@ -1066,17 +1528,6 @@ "x.grad" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "pl.plot(x.detach(), x.grad.detach())" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -1085,7 +1536,7 @@ "\n", "PyTorch's `autograd` is a very powerfull tool. For instance, it can calculate the Jacobian and Hessian of any given function! Here is a list of more advanced things that you can accomplish with `autograd`:\n", "\n", - "- Vector-Jacobian products for non-scalar outputs (e.g. when `y` is a vector)\n", + "- Vector-Jacobian products for non-scalar outputs (e.g., when `y` is a vector)\n", "- Compute Jacobian and Hessian\n", "- Retain the computation graph (useful for inspecting gradients inside a model)\n", "- Sparse gradients\n", @@ -1094,7 +1545,7 @@ "- Numerical gradient checking\n", "\n", "\n", - "More info: pytorch.org/docs/stable/autograd.html" + "More info: https://pytorch.org/docs/stable/autograd.html" ] }, { @@ -1135,15 +1586,28 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Exercise:**\n", - "\n", - "Derive the gradient $$\\frac{\\partial y}{\\partial x}$$ and make a function that computes it. Check that it gives the same as `x.grad`." + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + " Exercise: Derive the gradient \n", + "

\n", + " $$\n", + " \\dfrac{\\partial \\big[\\sum_{x_i} e^{0.001 x_i^2} + \\sin(x_i^3) \\cdot \\log(x_i)\\big]}{\\partial x}\n", + " $$\n", + "
\n", + " and make a function that computes it. Check that it gives the same output as `x.grad` in our previous example.\n", + "
" ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -1157,7 +1621,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/02-linear-regression.ipynb b/02-linear-regression.ipynb index f74b15a..5fc87aa 100644 --- a/02-linear-regression.ipynb +++ b/02-linear-regression.ipynb @@ -23,7 +23,10 @@ "metadata": {}, "outputs": [], "source": [ - "%matplotlib inline" + "import numpy as np\n", + "import torch\n", + "import matplotlib.pyplot as plt\n", + "from pprint import pprint" ] }, { @@ -32,43 +35,51 @@ "metadata": {}, "outputs": [], "source": [ - "import torch\n", - "import torch.nn as nn\n", - "import torch.nn.functional as F\n", - "import torch.optim as optim\n", - "import torchvision\n", - "\n", - "DEVICE = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")" + "np.random.seed(0)\n", + "torch.manual_seed(0);" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "from pprint import pprint\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "from IPython.core.debugger import set_trace" + "## The Problem" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "np.random.seed(0)\n", - "torch.manual_seed(0)" + "Suppose that we want to predict a real-valued quantity $y \\in \\mathbb{R}$ for a given input $\\mathbf{x} \\in \\mathbb{R}^d$. This is known as **regression**. \n", + "\n", + "The most common loss function for regression is the **quadractic loss** or **$\\ell_2$ loss**:\n", + "\n", + "$$\n", + "\\ell_2(y, \\hat{y}) = (y - \\hat{y})^2\n", + "$$\n", + "\n", + "The empirical risk becomes the **mean squared error (MSE)**:\n", + "\n", + "$$\n", + "MSE(\\theta) = \\frac{1}{N} \\sum\\limits_{n=1}^{N} (y_n - f(\\mathbf{x}_n; \\theta))^2\n", + "$$\n", + "\n", + "The model $f(\\mathbf{x}_n; \\theta)$ can be parameterized in many ways. In this lecture we will focus on a linear parameterization, leading to the well-known **Linear Regression** formulation:\n", + "\n", + "$$\n", + "f(\\mathbf{x}; \\theta) = \\mathbf{w}^\\top \\mathbf{x} + b = w_1 x_1 + w_2 x_2 + \\cdots + w_D x_D + b\n", + "$$\n", + "\n", + "where $\\theta = (b, \\mathbf{w})$ are the parameters of the model." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# The Problem" + "## Example\n", + "\n", + "Let's create a synthetic regression dataset using `sklearn`'s `make_regression` function. For better visualization, we will use only a single feature.$" ] }, { @@ -95,6 +106,124 @@ "print(X.shape, y.shape)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For instance, by looking at the plot above, let's say that $w \\approx 40$ and $b \\approx 2$. Then, we would arrive at the following predictions (with vertical bars indicating the errors)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# our estimate\n", + "w = 40.0\n", + "b = 2.0\n", + "y_pred = w*X + b\n", + "\n", + "# subplots\n", + "fig, axs = plt.subplots(1, 2, figsize=(16, 4))\n", + "\n", + "# left plot\n", + "axs[0].plot(X, y, 'o')\n", + "axs[0].plot(X, y_pred, '-')\n", + "\n", + "# right plot\n", + "axs[1].vlines(X, y, y_pred, color='black')\n", + "axs[1].plot(X, y, 'o')\n", + "axs[1].plot(X, y_pred, '-')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "By adjusting our parameters $\\theta=(w, b)$, we can minimize the sum of squared errors to find the **least squares solution**\n", + "\n", + "$$\n", + "\\begin{align}\n", + "\\hat{\\theta} &= \\arg\\min_\\theta MSE(\\theta) \\\\\n", + "&= \\arg\\min_\\theta \\frac{1}{N} \\sum\\limits_{n=1}^{N} (y_n - f(\\mathbf{x}_n; \\theta))^2 \\\\\n", + "&= \\arg\\min_{w,b} \\frac{1}{N} \\sum\\limits_{n=1}^{N} (y_n - (w \\cdot x_n + b))^2\n", + "\\end{align}\n", + "$$\n", + "\n", + "Which can be found by taking the gradient of the loss function w.r.t. $\\theta$. \n", + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In general, for inputs with higher dimensionality $d$, we have $\\mathbf{w} \\in \\mathbb{R}^d$, and thus we have the following gradient (assuming that $b$ is absorbed by $w$):\n", + "\n", + "$$\n", + "\\begin{align}\n", + "\\nabla_\\mathbf{w} MSE(\\theta) &= \\nabla_\\mathbf{w} \\frac{1}{N} \\sum\\limits_{n=1}^{N} (y_n - f(\\mathbf{x}_n; \\theta))^2 \\\\\n", + "&= \\frac{-2}{N} \\sum\\limits_{n=1}^{N} (y_n - f(\\mathbf{x}_n; \\theta)) \\cdot \\nabla_\\mathbf{w} f(\\mathbf{x}_n; \\theta) \\\\\n", + "&= \\frac{-2}{N} \\sum\\limits_{n=1}^{N} (y_n - (\\mathbf{w}^\\top \\mathbf{x}_n + b)) \\cdot \\mathbf{x}_n\n", + "\\end{align}\n", + "$$\n", + "\n", + "Now, we just have follow the gradient descent rule to update $\\mathbf{w}$: \n", + "\n", + "$$\n", + "\\mathbf{w}_{t+1} = \\mathbf{w}_{t} - \\alpha \\nabla_{\\mathbf{w}} MSE(\\theta)\n", + "$$\n", + "\n", + "Where $\\alpha$ represents the learning rate. So, let's implement this in numpy to see what happens." 
+ ] + }, { "cell_type": "markdown", "metadata": {}, @@ -113,31 +242,35 @@ " self.W = np.zeros((n_targets, n_features))\n", " self.lr = lr\n", "\n", - " def update_weight(self, X, y):\n", - " m = X.shape[0]\n", - " y_hat = self.predict(X)\n", - " W_grad = 2 * np.dot(X.T, y_hat - y) / m\n", + " def update_weight(self, X, y, y_hat):\n", + " N = X.shape[0]\n", + " W_grad = - 2 * np.dot(X.T, y - y_hat) / N\n", " self.W = self.W - self.lr * W_grad\n", "\n", " def loss(self, y_hat, y):\n", - " return np.mean(np.power(y_hat - y, 2))\n", + " return np.mean(np.power(y - y_hat, 2))\n", "\n", " def predict(self, X):\n", - " y_hat = np.dot(X, self.W.T)\n", - " return y_hat.squeeze(-1)\n", + " return np.dot(X, self.W.T).squeeze(-1)\n", "\n", " def train(self, X, y, epochs=50):\n", " \"\"\"\n", - " X (n_examples x n_features):\n", + " X (n_examples x n_features): input matrix\n", " y (n_examples): gold labels\n", " \"\"\"\n", " loss_history = []\n", " for _ in range(epochs):\n", - " # for x_i, y_i in zip(X, y):\n", - " # self.update_weight(x_i, y_i)\n", - " self.update_weight(X, y)\n", + " # get prediction for computing the loss\n", " y_hat = self.predict(X)\n", " loss = self.loss(y_hat, y)\n", + "\n", + " # update weights\n", + " self.update_weight(X, y, y_hat)\n", + " # (thought exercise): what happens if we do this instead?\n", + " # for x_i, y_i in zip(X, y):\n", + " # self.update_weight(x_i, y_i)\n", + "\n", + " # save loss value\n", " loss_history.append(loss)\n", " return loss_history" ] @@ -148,7 +281,9 @@ "metadata": {}, "outputs": [], "source": [ - "use_bias = False\n", + "# trick for handling the bias term:\n", + "# concat a columns of 1s to the original input matrix X\n", + "use_bias = True\n", "if use_bias:\n", " X_np = np.hstack([np.ones((n_samples,1)), X])\n", " n_features += 1\n", @@ -173,8 +308,10 @@ "metadata": {}, "outputs": [], "source": [ + "print('b:', model.W[0,0])\n", + "print('W:', model.W[0,1])\n", "plt.plot(loss_history)\n", - "plt.title('Loss per epoch');" + "plt.title('Loss per epoch')" ] }, { @@ -184,18 +321,25 @@ "outputs": [], "source": [ "# Vis\n", - "fig, ax = plt.subplots()\n", - "ax.plot(X, y, \".\", label=\"data\")\n", - "ax.plot(X, y_hat, \".\", label=\"pred\")\n", - "ax.set_title(f\"MSE: {loss_history[-1]:0.1f}\")\n", - "ax.legend();" + "fig, axs = plt.subplots(1, 2, figsize=(16, 4))\n", + "axs[0].plot(X, y, \"o\", label=\"data\")\n", + "axs[0].plot(X, 40*X + 2, \"-\", label=\"pred\")\n", + "axs[0].set_title(\"Guess\")\n", + "axs[0].legend();\n", + "\n", + "axs[1].plot(X, y, \"o\", label=\"data\")\n", + "axs[1].plot(X, y_hat, \"-\", label=\"pred\")\n", + "axs[1].set_title(\"Numpy solution\")\n", + "axs[1].legend();" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# Numpy + Autograd Solution" + "# Numpy + Autograd Solution\n", + "\n", + "In the previous implementation, we had to derive the gradient $\\frac{\\partial MSE(\\theta)}{\\partial \\theta}$ manually. If the model $f(\\cdot;\\theta)$ is more complex, this might be a cumbersome and error-prone task. 
To avoid this, we will use PyTorch `autograd` to automatically compute gradients.\n" ] }, { @@ -206,7 +350,8 @@ "source": [ "class MixedLinearRegression(object):\n", " def __init__(self, n_features, n_targets=1, lr=0.01):\n", - " self.W = torch.zeros(n_targets, n_features, requires_grad=True) # note requires_grad=True!\n", + " # note requires_grad=True!\n", + " self.W = torch.zeros(n_targets, n_features, requires_grad=True)\n", " self.lr = lr\n", " \n", " def update_weight(self):\n", @@ -214,20 +359,18 @@ " self.W.data = self.W.data - self.lr * self.W.grad.data\n", "\n", " def loss(self, y_hat, y):\n", - " return torch.mean(torch.pow(y_hat - y, 2))\n", + " return torch.mean(torch.pow(y - y_hat, 2))\n", "\n", " def predict(self, X):\n", - " y_hat = torch.matmul(X, self.W.t())\n", - " return y_hat.squeeze(-1)\n", + " return torch.matmul(X, self.W.t()).squeeze(-1)\n", "\n", " def train(self, X, y, epochs=50):\n", " \"\"\"\n", - " X (n_examples x n_features):\n", + " X (n_examples x n_features): input matrix\n", " y (n_examples): gold labels\n", " \"\"\"\n", " loss_history = []\n", " for _ in range(epochs):\n", - " \n", " # Our neural net is a Line function!\n", " y_hat = self.predict(X)\n", " \n", @@ -235,10 +378,11 @@ " loss = self.loss(y_hat, y)\n", " \n", " # Computes the gradient of loss with respect to all Variables with requires_grad=True.\n", + " # where Variables are tensors with requires_grad=True\n", " loss.backward()\n", " loss_history.append(loss.item())\n", "\n", - " # Update a and b using gradient descent; a.data and b.data are Tensors.\n", + " # Update weights using gradient descent; W.data is a Tensor.\n", " self.update_weight()\n", "\n", " # Reset the accumulated gradients\n", @@ -275,6 +419,8 @@ "metadata": {}, "outputs": [], "source": [ + "print('b:', model.W[0,0].item())\n", + "print('W:', model.W[0,1].item())\n", "plt.plot(loss_history)\n", "plt.title('Loss per epoch');" ] @@ -286,28 +432,32 @@ "outputs": [], "source": [ "# Vis\n", - "fig, ax = plt.subplots()\n", - "ax.plot(X_pt.cpu().numpy(), y_pt.cpu().numpy(), \".\", label=\"data\")\n", - "ax.plot(X_pt.cpu().numpy(), y_hat.cpu().numpy(), \".\", label=\"pred\")\n", - "ax.set_title(f\"MSE: {loss_history[-1]:0.1f}\")\n", - "ax.legend();" + "fig, axs = plt.subplots(1, 3, figsize=(16, 4))\n", + "axs[0].plot(X, y, \"o\", label=\"data\")\n", + "axs[0].plot(X, 40*X + 2, \"-\", label=\"pred\")\n", + "axs[0].set_title(\"Guess\")\n", + "axs[0].legend();\n", + "\n", + "axs[1].plot(X, y, \"o\", label=\"data\")\n", + "axs[1].plot(X, 47.12483907744531*X + 2.3264433961431727, \"-\", label=\"pred\")\n", + "axs[1].set_title(\"Numpy solution\")\n", + "axs[1].legend();\n", + "\n", + "axs[2].plot(X, y, \"o\", label=\"data\")\n", + "axs[2].plot(X, y_hat, \"-\", label=\"pred\")\n", + "axs[2].set_title(\"Mixed solution\")\n", + "axs[2].legend();" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# PyTorch Solution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "X = torch.from_numpy(X).float()\n", - "y = torch.from_numpy(y.reshape((n_samples, n_features))).float()" + "# PyTorch Solution\n", + "\n", + "Mixing PyTorch and Numpy is no fun. PyTorch is actually very powerful and provides most of the things we need to apply gradient descent for any model $f$, as long all operations applied over the inputs are Torch operations (so gradients can be tracked). 
\n", + "\n", + "To this end, we will use the submodule `torch.nn`, which provides us a way for encapsulating our model into a `nn.Module`. With this, all we need to do is define the our parameters in the `__init__` method and then the _forward_ pass of our model in the `forward` method. " ] }, { @@ -316,13 +466,28 @@ "metadata": {}, "outputs": [], "source": [ - "class LinReg(nn.Module):\n", - " def __init__(self, input_dim):\n", - " super().__init__()\n", - " self.beta = nn.Linear(input_dim, 1)\n", + "from torch import nn\n", + "from torch import optim\n", + "\n", + "# See the inheritance from nn.Module\n", + "class TorchLinearRegression(nn.Module):\n", + " \n", + " def __init__(self, n_features, n_targets=1):\n", + " super().__init__() # this is mandatory!\n", + " \n", + " # encapsulate our weights into a nn.Parameter object\n", + " self.W = torch.nn.Parameter(torch.zeros(n_targets, n_features))\n", "\n", " def forward(self, X):\n", - " return self.beta(X)" + " \"\"\"\n", + " X (n_examples x n_features): input matrix\n", + " \"\"\"\n", + " #if self.training:\n", + " # X = X ** 2\n", + " #else:\n", + " # X = X ** 3\n", + " # import ipdb; ipdb. set_trace()\n", + " return X @ self.W.t()" ] }, { @@ -332,12 +497,22 @@ "outputs": [], "source": [ "# define model, loss function and optmizer\n", - "model = LinReg(n_features).to(DEVICE) # <-- here\n", + "model = TorchLinearRegression(n_features)\n", "loss_fn = nn.MSELoss()\n", "optimizer = optim.SGD(model.parameters(), lr=0.1)\n", "\n", "# move to CUDA if available\n", - "X, y = X.to(DEVICE), y.to(DEVICE) # <-- here" + "device = 'cuda' if torch.cuda.is_available() else 'cpu'\n", + "model = model.to(device)\n", + "X = X_pt.to(device)\n", + "y = y_pt.to(device).unsqueeze(-1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "All done! 
Now we just have to write a training loop, which is more or less a standard set of steps for training all models:" ] }, { @@ -347,17 +522,23 @@ "outputs": [], "source": [ "def train(model, X, y, epochs=50):\n", - " model.train() # <-- here\n", + " # inform PyTorch that we are in \"training\" mode\n", + " model.train()\n", + " \n", " loss_history = []\n", " for _ in range(epochs):\n", + " # reset gradients before learning\n", " optimizer.zero_grad()\n", - "\n", - " y_ = model(X)\n", - " loss = loss_fn(y_, y)\n", " \n", + " # get predictions and and the final score from the loss function \n", + " y_hat = model(X)\n", + " loss = loss_fn(y_hat, y)\n", " loss_history.append(loss.item())\n", - "\n", + " \n", + " # compute gradients of the loss wrt parameters\n", " loss.backward()\n", + " \n", + " # perform gradient step to update the parameters\n", " optimizer.step()\n", "\n", " return loss_history" @@ -370,10 +551,15 @@ "outputs": [], "source": [ "def evaluate(model, X):\n", - " model.eval() # <-- here\n", + " # inform PyTorch that we are in \"evaluation\" mode\n", + " model.eval()\n", + " \n", + " # disable gradient tracking\n", " with torch.no_grad():\n", - " y_ = model(X) \n", - " return y_" + " # get prediction\n", + " y_hat = model(X)\n", + " \n", + " return y_hat" ] }, { @@ -392,6 +578,8 @@ "metadata": {}, "outputs": [], "source": [ + "print('b:', model.W[0,0].item())\n", + "print('W:', model.W[0,1].item())\n", "plt.plot(loss_history)\n", "plt.title('Loss per epoch');" ] @@ -403,39 +591,61 @@ "outputs": [], "source": [ "# Vis\n", - "fig, ax = plt.subplots()\n", - "ax.plot(X.cpu().numpy(), y.cpu().numpy(), \".\", label=\"data\")\n", - "ax.plot(X.cpu().numpy(), y_hat.cpu().numpy(), \".\", label=\"pred\")\n", - "ax.set_title(f\"MSE: {loss_history[-1]:0.1f}\")\n", - "ax.legend();" + "X = X_pt[:, 1:].numpy()\n", + "y = y_pt.squeeze(-1).numpy()\n", + "\n", + "fig, axs = plt.subplots(1, 4, figsize=(16, 4))\n", + "axs[0].plot(X, y, \"o\", label=\"data\")\n", + "axs[0].plot(X, 40*X + 2, \"-\", label=\"pred\")\n", + "axs[0].set_title(\"Guess\")\n", + "axs[0].legend();\n", + "\n", + "axs[1].plot(X, y, \"o\", label=\"data\")\n", + "axs[1].plot(X, 47.12483907744531*X + 2.3264433961431727, \"-\", label=\"pred\")\n", + "axs[1].set_title(\"Numpy solution\")\n", + "axs[1].legend();\n", + "\n", + "axs[2].plot(X, y, \"o\", label=\"data\")\n", + "axs[2].plot(X, 47.12483596801758*X + 2.3264429569244385, \"-\", label=\"pred\")\n", + "axs[2].set_title(\"Mixed solution\")\n", + "axs[2].legend();\n", + "\n", + "axs[3].plot(X, y, \"o\", label=\"data\")\n", + "axs[3].plot(X, y_hat, \"-\", label=\"pred\")\n", + "axs[3].set_title(\"PyTorch solution\")\n", + "axs[3].legend();" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Note: I did gradient descent with all the data. I did not split the data into `train` and `valid` which should be done!" + "**Note:** I did gradient descent with the entire dataset rather than splitting the data into `train` and `valid` subsets, which should be done in practice!" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Exercise" + "## Exercises" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "- Write a proper training loop for PyTorch." 
+ "- Write a proper training loop for PyTorch:\n", + " - add support for batches\n", + " - add a stop criterion for the convergence of the model\n", + " \n", + "- Add L2 regularization" ] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -449,7 +659,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/03-modules-and-mlps.ipynb b/03-modules-and-mlps.ipynb index b8f0639..e6822d5 100644 --- a/03-modules-and-mlps.ipynb +++ b/03-modules-and-mlps.ipynb @@ -8,13 +8,13 @@ "\n", "A typical training procedure for a neural net:\n", "\n", - "0. Define a dataset (what is X and Y)\n", + "0. Define a dataset ($X$ and $Y$)\n", "1. Define the neural network with some learnable weights\n", - "2. Iterate over a dataset of inputs\n", - "3. Pass inputs to the network (forward)\n", + "2. Iterate over the dataset\n", + "3. Pass inputs to the network (forward pass)\n", "4. Compute the loss\n", - "5. Compute gradients w.r.t. network's weights\n", - "6. Update weights (e.g. weight = weight - lr * gradient)\n", + "5. Compute gradients w.r.t. network's weights (backward pass)\n", + "6. Update weights (e.g., weight = weight - lr * gradient)\n", "\n", "PyTorch handles 1-6 for you via encapsulation, so you still have the flexibility to change something in between if you want! " ] @@ -23,20 +23,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Read the data\n", + "## Example: MNIST classifier\n", "\n", - "We will download the MNIST dataset for training a classifier. Torch provides a convenient function for that.\n", + "The MNIST dataset is composed of images of digits that must be classified with labels from 0 to 9. The inputs are 28x28 matrices containing the grayscale intensity in each pixel.\n", "\n", - "The MNIST dataset is composed of images of digits that must be classified with labels from 0 to 9. The inputs are 28x28 matrices containing the grayscale intensity in each pixel." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%matplotlib inline" + "We will download the MNIST dataset for training a classifier. PyTorch provides a convenient function for that." 
] }, { @@ -47,32 +38,9 @@ "source": [ "import torch\n", "import torch.nn as nn\n", - "import torch.nn.functional as F\n", "import torch.optim as optim\n", - "import torchvision\n", - "from torchvision import datasets" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from pprint import pprint\n", - "\n", + "from torchvision import datasets\n", "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "from IPython.core.debugger import set_trace" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "np.random.seed(0)\n", "torch.manual_seed(0);" ] }, @@ -82,38 +50,10 @@ "source": [ "# Dataset\n", "It's easy to create your `Dataset`,\n", - "but PyTorch comes with some\n", - "[built-in datasets](https://pytorch.org/docs/stable/torchvision/datasets.html):\n", + "but PyTorch comes with several built-in datasets for [vision](https://pytorch.org/vision/stable/datasets.html), [audio](https://pytorch.org/audio/stable/datasets.html), and [text](https://pytorch.org/text/stable/datasets.html) modalities.\n", "\n", - "- MNIST\n", - "- Fashion-MNIST\n", - "- KMNIST\n", - "- EMNIST\n", - "- FakeData\n", - "- COCO\n", - " - Captions\n", - " - Detection\n", - "- LSUN\n", - "- ImageFolder\n", - "- DatasetFolder\n", - "- Imagenet-12\n", - "- CIFAR\n", - "- STL10\n", - "- SVHN\n", - "- PhotoTour\n", - "- SBU\n", - "- Flickr\n", - "- VOC\n", - "- Cityscapes\n", + "The class `Dataset` gives you information about the number of samples (implement `__len__`) and gives you the sample at a given index (implement `__getitem__`). It's a nice and simple abstraction to work with data. It has the following structure:\n", "\n", - "`Dataset` gives you information about the number of samples (implement `__len__`) and gives you the sample at a given index (implement `__getitem__`).\n", - "It's a nice and simple abstraction to work with data." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ "```python\n", "class Dataset(object):\n", " def __getitem__(self, index):\n", @@ -126,7 +66,7 @@ " return ConcatDataset([self, other])\n", "```\n", "\n", - "For now, let's use MNIST. You'll have an example on how to use `Dataset` in your next homework." + "For now, let's use MNIST. But feel free to use another `Dataset` as an exercise." 
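Editor's note: as a hedged illustration of that abstraction (a toy dataset built from random tensors, not part of the lecture materials), a custom `Dataset` only needs the two methods shown above:

```python
import torch
from torch.utils.data import Dataset, DataLoader

class ToyDataset(Dataset):
    """Wraps two in-memory tensors as (input, target) pairs."""

    def __init__(self, inputs, targets):
        assert len(inputs) == len(targets)
        self.inputs = inputs
        self.targets = targets

    def __len__(self):
        # number of samples
        return len(self.inputs)

    def __getitem__(self, index):
        # one (input, target) pair
        return self.inputs[index], self.targets[index]

toy = ToyDataset(torch.randn(100, 784), torch.randint(0, 10, (100,)))
loader = DataLoader(toy, batch_size=16, shuffle=True)
batch_x, batch_y = next(iter(loader))
print(batch_x.shape, batch_y.shape)  # torch.Size([16, 784]) torch.Size([16])
```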
] }, { @@ -163,15 +103,15 @@ "source": [ "n_train_examples = train_x.shape[0]\n", "n_test_examples = test_x.shape[0]\n", - "\n", - "print('%d training instances and %d test instances' % (n_train_examples, n_test_examples))" + "print('Training instances:', n_train_examples)\n", + "print('Test instances:', n_test_examples)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Check the shape of our training data to see how many input features there are:" + "Check the shape of our training data to see how many input features we have:" ] }, { @@ -180,23 +120,31 @@ "metadata": {}, "outputs": [], "source": [ - "print(train_x.shape)" + "train_x.shape, train_y.shape" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "And what the image looks like:" + "And what the images looks like:" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "scrolled": true + }, "outputs": [], "source": [ - "plt.imshow(train_x[0])" + "C = 8\n", + "fig, axs = plt.subplots(3, C, figsize=(12, 4))\n", + "for i in range(3):\n", + " for j in range(C):\n", + " axs[i, j].imshow(train_x[i*C + j], cmap='gray')\n", + " axs[i, j].set_axis_off()\n", + "print(train_y[:24].reshape(3, C))" ] }, { @@ -210,7 +158,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Each sample is a 28x28 matrix. But we want to represent them as vectors, since our model (a simple MLP) doesn't take any advantage of the 2-d nature of the data.\n", + "Each sample is a 28x28 matrix. But we want to represent them as vectors, since our model (which will be a simple MLP) doesn't take any advantage of the 2D nature of the data.\n", "\n", "So, we reshape the data:" ] @@ -222,8 +170,7 @@ "outputs": [], "source": [ "num_features = 28 * 28\n", - "new_shape = [n_train_examples, num_features]\n", - "train_x_vectors = train_x.reshape(new_shape)\n", + "train_x_vectors = train_x.view(n_train_examples, num_features)\n", "print(train_x_vectors.shape)" ] }, @@ -240,53 +187,17 @@ "metadata": {}, "outputs": [], "source": [ - "train_x_vectors = train_x.view(-1, num_features)\n", + "train_x_vectors = train_x.view(n_train_examples, -1)\n", "test_x_vectors = test_x.view(n_test_examples, -1)\n", - "print(train_x_vectors.shape, test_x_vectors.shape)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Also, the values are integers in the range $[0, 255]$. It is better to work with float values in a smaller interval, such as $[0, 1]$ or $[-1, 1]$. There are some more elaborate normalization techniques, but for now let's just normalize it to $[0, 1]$." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "train_x_norm = train_x_vectors / 255\n", - "test_x_norm = test_x_vectors / 255\n", - "print(train_x_norm[0])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Oops! Notice that the arrays had integer values, but the result of the division would be floats. One way to change the `dtype` of a torch tensor is using `.to(torch.dtype)`. Check here for the complete list of supported data types: https://pytorch.org/docs/stable/tensors.html\n", "\n", - "Keep in mind that data type is a common source of errors!" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "train_x_vectors = train_x_vectors.to(torch.float)\n", - "test_x_vectors = test_x_vectors.to(torch.float)" + "print(train_x_vectors.shape, test_x_vectors.shape)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Let's try again:" + "Also, the values are integers in the range $[0, 255]$. It is better to work with float values in a smaller interval, such as $[0, 1]$ or $[-1, 1]$. There are some more elaborate normalization techniques, but for now let's just normalize the data into $[0, 1]$." ] }, { @@ -295,8 +206,8 @@ "metadata": {}, "outputs": [], "source": [ - "train_x_norm = train_x_vectors / 255\n", - "test_x_norm = test_x_vectors / 255\n", + "train_x_norm = train_x_vectors / 255.0\n", + "test_x_norm = test_x_vectors / 255.0\n", "print(train_x_norm.max(), train_x_norm.min(), train_x_norm.mean(), train_x_norm.std())" ] }, @@ -304,16 +215,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Now, check the labels:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(train_y[:20])" + "Now, let's check all the available labels:" ] }, { @@ -323,25 +225,8 @@ "outputs": [], "source": [ "print(torch.unique(train_y))\n", - "num_classes = len(torch.unique(train_y))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "train_x_norm.shape" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "train_y.shape" + "num_classes = len(torch.unique(train_y))\n", + "print('Num classes:', num_classes)" ] }, { @@ -350,7 +235,7 @@ "source": [ "# Modules and MLPs\n", "\n", - "We've seen how the internals of simple linear classifier work. However, we still had to set a lot of things manually. It's much better to have a higher-level API that encapsulates the classifier.\n", + "We've seen how the internals of a simple linear classifier work. However, we still had to set a lot of things manually. It's much better to have a higher-level API that encapsulates the classifier.\n", "\n", "We are going to see that now, with pytorch Module objects. Then, it will allow us to build more complex models, like a multilayer perceptron." 
] @@ -359,7 +244,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We begin by loading, reshaping and normalizing the data again:" + "We begin by loading, reshaping and normalizing the data again (so the code looks concise):" ] }, { @@ -368,13 +253,10 @@ "metadata": {}, "outputs": [], "source": [ - "import torch\n", - "import numpy as np\n", - "from torchvision import datasets\n", - "from matplotlib import pyplot as pl\n", + "from torchvision.transforms import ToTensor\n", "\n", - "train_dataset = datasets.MNIST('../data', train=True, download=True, transform=torchvision.transforms.ToTensor())\n", - "test_dataset = datasets.MNIST('../data', train=False, transform=torchvision.transforms.ToTensor())\n", + "train_dataset = datasets.MNIST('../data', train=True, download=True, transform=ToTensor())\n", + "test_dataset = datasets.MNIST('../data', train=False, transform=ToTensor())\n", "\n", "train_x = train_dataset.data\n", "train_y = train_dataset.targets\n", @@ -382,14 +264,14 @@ "test_y = test_dataset.targets\n", "\n", "num_features = 28 * 28\n", - "num_classes = len(np.unique(train_y))\n", + "num_classes = len(torch.unique(train_y))\n", "new_shape = [-1, num_features]\n", "train_x_vectors = train_x.reshape(new_shape)\n", "test_x_vectors = test_x.reshape(new_shape)\n", "\n", "# shorten the names\n", - "train_x = train_x_vectors.to(torch.float) / 255\n", - "test_x = test_x_vectors.to(torch.float) / 255" + "train_x = train_x_vectors.float() / 255\n", + "test_x = test_x_vectors.float() / 255" ] }, { @@ -415,6 +297,9 @@ " self.linear_layer = nn.Linear(n_features, n_classes)\n", " \n", " def forward(self, X):\n", + " # This is the same as doing:\n", + " # return X @ self.linear_layer.weight.t() + self.linear_layer.bias\n", + " # where weight and bias are instances of nn.Parameter\n", " return self.linear_layer(X)\n", "\n", "linear_model = LinearModel(num_features, num_classes)" @@ -424,7 +309,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The model can be called as function to compute an output. Let's see how it works:" + "As before, the model can be called as function in order to produce an output:" ] }, { @@ -434,9 +319,8 @@ "outputs": [], "source": [ "batch = train_x[:2]\n", - "\n", - "answers = linear_model(batch)\n", - "answers" + "outputs = linear_model(batch)\n", + "outputs" ] }, { @@ -473,7 +357,9 @@ "source": [ "#### Batching\n", "\n", - "Batching can be boring to code. `DataLoader` helps!" + "Batching can be boring to code. PyTorch provides the `DataLoader` class to help us! Dealing with data is one of the most important yet more time consuming tasks. Take a look in the PyTorch `data` submodule to [learn more](https://pytorch.org/docs/stable/data.html).\n", + "\n", + "In general, we just have to pass a torch `Dataset` object as input to the dataloader, and then set some hyperparams for the iterator: " ] }, { @@ -483,6 +369,7 @@ "outputs": [], "source": [ "from torch.utils.data import DataLoader\n", + "print(type(train_dataset))\n", "\n", "train_dataloader = DataLoader(train_dataset, batch_size=64, shuffle=True)" ] @@ -493,8 +380,8 @@ "source": [ "#### Loss\n", "\n", - "Here is the complete list of available loss functions: https://pytorch.org/docs/stable/nn.html#loss-functions\n", - "If the provided loss functions don't satisfy your constraints, it is easy to define your own loss function! 
Here is a simple example of how it works" + "Here is the complete list of available [loss functions](https://pytorch.org/docs/stable/nn.html#loss-functions).\n", + "If the provided loss functions don't satisfy your constraints, it is easy to define your own loss function: just use torch operations (and be careful with differentiability issues). For example:" ] }, { @@ -510,14 +397,37 @@ " # try other losses!\n", " # multi-class classification hinge loss (margin-based loss):\n", " # dummy_loss = nn.MultiMarginLoss() \n", - " \n", " batch = train_x[:2]\n", " targets = train_y[:2]\n", " predictions = linear_model(batch)\n", " \n", " print(predictions.shape, targets.shape)\n", - " print(dummy_loss(predictions, targets))\n", - " " + " print(dummy_loss(predictions, targets))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And writing our own function (from the definition of the Cross Entropy loss):\n", + "\n", + "$$\n", + "CE(p,y) = - \\log\\frac{\\exp(p_y)}{\\sum_c \\exp(p_c)}\n", + "$$" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def dummy_loss(y_pred, y):\n", + " one_hot = y.unsqueeze(1) == torch.arange(num_classes).unsqueeze(0)\n", + " res = - torch.log(torch.exp(y_pred) / torch.exp(y_pred).sum(-1).unsqueeze(-1))[one_hot]\n", + " return res.mean() # average per sample\n", + "\n", + "print(dummy_loss(predictions, targets))" ] }, { @@ -544,7 +454,7 @@ "\n", "The optimizer is the object which handles the update of the model's parameters. In the previous exercise, we were using the famous \"delta\" rule to update our weights:\n", "\n", - "$$W_t = W_{t-1} - \\alpha \\frac{\\partial L}{\\partial W}.$$\n", + "$$\\mathbf{w}_t = \\mathbf{w}_{t-1} - \\alpha \\frac{\\partial L}{\\partial \\mathbf{w}}.$$\n", "\n", "But there are more ellaborate ways of updating our parameters: \n", "\n", @@ -555,7 +465,7 @@ "\n", "PyTorch provides an extensive list of optimizers: https://pytorch.org/docs/stable/optim.html. Notice that, as everything else, it should be easy to define your own optimizer procedure. \n", "\n", - "In this lecture we will use the simple yet powerful SGD optmizer. The optimizer needs to be told which are the parameters to optimize." + "We will use the simple yet powerful SGD optmizer. The optimizer needs to be told which are the parameters to optimize." ] }, { @@ -564,9 +474,8 @@ "metadata": {}, "outputs": [], "source": [ - "learning_rate = 0.1\n", "parameters = linear_model.parameters() # we will optimize all model's parameters!\n", - "optimizer = torch.optim.SGD(parameters, lr=learning_rate, momentum=0.9)" + "optimizer = torch.optim.SGD(parameters, lr=0.1)" ] }, { @@ -575,7 +484,7 @@ "source": [ "#### Training loop\n", "\n", - "Now we write the main training loop. This is the basic skeleton for training pytorch models." + "Now we write the main training loop. This is the basic skeleton for training PyTorch models." ] }, { @@ -584,24 +493,29 @@ "metadata": {}, "outputs": [], "source": [ - "def train_model(model, train_dataloader, num_epochs, optimizer):\n", + "def train_model(model, dataloader, optimizer, loss_function, num_epochs=1):\n", " # Tell PyTorch that we are in training mode.\n", " # This is useful for mechanisms that work differently during training and test time, like Dropout. 
\n", " model.train()\n", " \n", " losses = []\n", - " \n", " for epoch in range(1, num_epochs+1):\n", " print('Starting epoch %d' % epoch)\n", " total_loss = 0\n", " hits = 0\n", "\n", - " for batch_x, batch_y in train_dataloader:\n", + " for batch_x, batch_y in dataloader:\n", + " # check shapes with:\n", + " # import ipdb; ipdb.set_trace()\n", + " # batch_x.shape is (batch_size, 28, 28)\n", + " # batch_y.shape is (batch_size, )\n", + " \n", " # Step 1. Remember that PyTorch accumulates gradients.\n", - " # We need to clear them out before each instance\n", + " # We need to clear them out before each step\n", " optimizer.zero_grad()\n", " \n", - " # Step 2. Get the data for this batch\n", + " # Step 2. Preprocess the data\n", + " # (batch_size, 28, 28) -> (batch_size, 784 = 28 * 28)\n", " batch_x = batch_x.reshape(batch_x.shape[0], -1)\n", " batch_x = batch_x.to(torch.float) / 255.0\n", "\n", @@ -614,7 +528,7 @@ " # Step 5. Compute gradeints\n", " loss.backward()\n", " \n", - " # Step 6. After determining the gradients, take a step toward their direction\n", + " # Step 6. After determining the gradients, take a step toward their (neg-)direction\n", " optimizer.step()\n", " \n", " # Optional. Save statistics of your training\n", @@ -623,14 +537,14 @@ " losses.append(loss_value)\n", " y_pred = logits.argmax(dim=1)\n", " hits += torch.sum(y_pred == batch_y).item()\n", - "\n", + " \n", " avg_loss = total_loss / len(train_dataloader.dataset)\n", " print('Epoch loss: %.4f' % avg_loss)\n", " acc = hits / len(train_dataloader.dataset)\n", " print('Epoch accuracy: %.4f' % acc)\n", " \n", " print('Done!')\n", - " return np.array(losses)" + " return losses" ] }, { @@ -639,14 +553,14 @@ "metadata": {}, "outputs": [], "source": [ - "linear_losses = train_model(linear_model, train_dataloader, 10, optimizer)" + "linear_losses = train_model(linear_model, train_dataloader, optimizer, loss_function, num_epochs=10)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Graphics are good to understand the performance of a model. Let's plot the loss curve by batch:" + "Graphics are good to understand the performance of a model. Let's plot the loss curve by training step:" ] }, { @@ -656,8 +570,9 @@ "outputs": [], "source": [ "fig, ax = plt.subplots()\n", - "ax.plot(linear_losses, \".\", label=\"linear\")\n", - "ax.legend()" + "ax.plot(linear_losses, \"-\")\n", + "ax.set_xlabel('Step')\n", + "ax.set_ylabel('Loss');" ] }, { @@ -682,8 +597,6 @@ "metadata": {}, "outputs": [], "source": [ - "hidden_size = 200\n", - "\n", "class MLP(nn.Module):\n", " def __init__(self, n_features, hidden_size, n_classes):\n", " super().__init__()\n", @@ -701,18 +614,17 @@ " def forward(self, X):\n", " return self.feedforward(X)\n", "\n", + "hidden_size = 200\n", "mlp = MLP(num_features, hidden_size, num_classes)\n", "loss_function = nn.CrossEntropyLoss()\n", - "optimizer = torch.optim.SGD(mlp.parameters(), lr=0.1, momentum=0.9)" + "optimizer = torch.optim.SGD(mlp.parameters(), lr=0.1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Now let's train the model. How do the loss and accuracy compare with the linear model?\n", - "\n", - "You probably also noticed a difference in running time!" + "Now let's train the model." 
] }, { @@ -721,14 +633,16 @@ "metadata": {}, "outputs": [], "source": [ - "mlp_losses = train_model(mlp, train_dataloader, 5, optimizer)" + "mlp_losses = train_model(mlp, train_dataloader, optimizer, loss_function, num_epochs=5)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Notice the different concentration of dots in the MLP and Linear graphics!" + "How do the loss and accuracy compare with the linear model?\n", + "\n", + "You probably also noticed a difference in running time!" ] }, { @@ -743,6 +657,13 @@ "ax.legend()" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note the different concentration of dots in the MLP and Linear graphics!" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -771,7 +692,7 @@ " y_pred = logits.argmax(dim=1)\n", " hits = torch.sum(y_pred == test_y).item()\n", " \n", - " return loss / len(test_x), hits / len(test_x)" + " return loss.item() / len(test_x), hits / len(test_x)" ] }, { @@ -855,7 +776,7 @@ "p_dropout = 0.5\n", "mlp_dropout = MLPDropout(num_features, hidden_size, num_classes, p_dropout)\n", "loss_function = nn.CrossEntropyLoss()\n", - "optimizer = torch.optim.SGD(mlp_dropout.parameters(), lr=0.1, momentum=0.9) # weight_decay" + "optimizer = torch.optim.SGD(mlp_dropout.parameters(), lr=0.1)" ] }, { @@ -864,7 +785,7 @@ "metadata": {}, "outputs": [], "source": [ - "losses = train_model(mlp_dropout, train_dataloader, 3, optimizer)" + "losses = train_model(mlp_dropout, train_dataloader, optimizer, loss_function, num_epochs=5)" ] }, { @@ -898,9 +819,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "No improvement. Ideally, we should retrain our model with different hyperparamters (learning rates, layer sizes, number of layers, dropout rate) as well as some changes in the structure (different optimizers, activation functions, losses).\n", + "No improvement. Ideally, we should retrain our model with different hyperparamters (learning rates, layer sizes, number of layers, dropout rate) as well as some changes in the structure (different optimizers, activation functions, losses). However, data representation plays a key role. \n", "\n", - "However, data representation plays a key role. Do you think representing the input as independent pixels is a good idea for recognizing digits?" + "
\n", + "
\n", + "Do you think representing the input as independent pixels is a good idea for recognizing digits?\n", + "
" ] }, { @@ -909,9 +833,7 @@ "source": [ "### Saving\n", "\n", - "Persisting the model after training is obviously important to reuse it later.\n", - "\n", - "In Pytorch, we can save the model calling `save()` and passing the model's `state_dict`." + "Persisting the model after training is obviously important to reuse it later. In Pytorch, we can save the model calling `save()` and passing the model's `state_dict` (a Python dict that maps all parameters name to their actual tensors)." ] }, { @@ -972,13 +894,14 @@ "source": [ "### Exercises\n", "\n", - "- Run the MLP example for more epochs" + "- Try running the MLP example for more epochs\n", + "- Try using CNNs: https://pytorch.org/docs/stable/generated/torch.nn.Conv2d.html" ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -992,7 +915,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/04-optional-word2vec.ipynb b/04-optional-word2vec.ipynb index 71fa126..8c2fe71 100644 --- a/04-optional-word2vec.ipynb +++ b/04-optional-word2vec.ipynb @@ -6,7 +6,14 @@ "source": [ "# Word2Vec\n", "\n", - "\"Word2vec is a technique for natural language processing. The word2vec algorithm uses a neural network model to learn word associations from a large corpus of text. Once trained, such a model can detect synonymous words or suggest additional words for a partial sentence. As the name implies, word2vec represents each distinct word with a particular list of numbers called a vector. The vectors are chosen carefully such that a simple mathematical function (the cosine similarity between the vectors) indicates the level of semantic similarity between the words represented by those vectors.\" [https://en.wikipedia.org/wiki/Word2vec]\n", + "> \"Word2vec is a technique for natural language processing. The word2vec algorithm uses a neural network model to learn word associations from a large corpus of text. Once trained, such a model can detect synonymous words or suggest additional words for a partial sentence. As the name implies, word2vec represents each distinct word with a particular list of numbers called a vector. The vectors are chosen carefully such that a simple mathematical function (the cosine similarity between the vectors) indicates the level of semantic similarity between the words represented by those vectors.\" [ https://en.wikipedia.org/wiki/Word2vec ]\n", + "\n", + "\n", + "There are two Word2Vec architectures: \n", + "\n", + "- **CBOW (Continuous Bag-of-Words)** predicts the central word from the sum of context vectors. This simple sum of word vectors is called \"bag of words\", which gives the name for the model.\n", + "\n", + "- **Skip-Gram** predicts context words given the central word. Skip-Gram with negative sampling is the most popular approach.\n", "\n", "Here we will build a PyTorch model that implements Word2Vec's CBOW strategy." ] @@ -19,37 +26,38 @@ ] }, { - "cell_type": "code", - "execution_count": 1, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "import torch\n", - "import torch.nn as nn\n", - "import torch.nn.functional as F\n", - "import torch.optim as optim" + "## What we can do with it?\n", + "\n", + "To calculate the proximity of words, usually the cosine or euclidean distances between vectors are used. 
Using word embeddings, we can build semantic proportions (also known as analogies) and solve examples like:\n", + "\n", + "$$\n", + "\\textit{king: male = queen: female} \\\\\n", + "\\Downarrow \\\\\n", + "\\textit{king - man + woman = queen}\n", + "$$\n", + "\n", + "" ] }, { - "cell_type": "code", - "execution_count": 2, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "from pprint import pprint\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "from IPython.core.debugger import set_trace" + "## Implementing Word2vec CBOW" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "np.random.seed(0)\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", "torch.manual_seed(0);" ] }, @@ -62,20 +70,21 @@ "class CBOW(nn.Module):\n", "\n", " def __init__(self, vocab_size, emb_size):\n", - " self.embeddings = nn.Embedding(vocab_size, emb_size)\n", - " self.lin_out = nn.Linear(emb_size, vocab_size)\n", + " super().__init__()\n", + " self.word_emb = nn.Embedding(vocab_size, emb_size)\n", + " self.linear = nn.Linear(emb_size, vocab_size)\n", "\n", " def forward(self, x):\n", - " # (bs, context_size) -> (bs, context_size, emb_dim)\n", - " x = self.emb(x)\n", + " # (batch_size, context_size) -> (batch_size, context_size, emb_dim)\n", + " x = self.word_emb(x)\n", " \n", - " # (bs, context_size, emb_dim) -> (bs, emb_dim)\n", + " # (batch_size, context_size, emb_dim) -> (batch_size, emb_dim)\n", " x = x.sum(dim=1)\n", "\n", - " # (bs, emb_dim) -> (bs, vocab_size)\n", - " logits = self.lin_out(x)\n", + " # (batch_size, emb_dim) -> (batch_size, vocab_size)\n", + " logits = self.linear(x)\n", "\n", - " return logits" + " return torch.log_softmax(logits, dim=-1)" ] }, { @@ -87,138 +96,242 @@ ] }, { - "cell_type": "code", - "execution_count": 4, + "cell_type": "markdown", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['We', 'are', 'about', 'to', 'study', 'the', 'idea', 'of', 'a', 'computational', 'process.']\n" - ] - } - ], "source": [ - "raw_text = \"\"\"We are about to study the idea of a computational process.\n", - "Computational processes are abstract beings that inhabit computers.\n", - "As they evolve, processes manipulate other abstract things called data.\n", - "The evolution of a process is directed by a pattern of rules\n", - "called a program. People create programs to direct processes. 
In effect,\n", - "we conjure the spirits of the computer with our spells.\"\"\".split()\n", - "print(raw_text[:11])" + "## Data" ] }, { "cell_type": "code", - "execution_count": 19, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "51\n", - "{'': 0, '': 1, 'they': 2, 'study': 3, 'about': 4, 'the': 5, 'beings': 6, 'by': 7, 'effect,': 8, 'to': 9, 'our': 10, 'spells.': 11, 'The': 12, 'process.': 13, 'processes.': 14, 'are': 15, 'evolve,': 16, 'rules': 17, 'a': 18, 'things': 19, 'People': 20, 'is': 21, 'computers.': 22, 'Computational': 23, 'we': 24, 'computer': 25, 'evolution': 26, 'manipulate': 27, 'As': 28, 'program.': 29, 'of': 30, 'processes': 31, 'inhabit': 32, 'We': 33, 'abstract': 34, 'direct': 35, 'with': 36, 'programs': 37, 'conjure': 38, 'In': 39, 'directed': 40, 'other': 41, 'process': 42, 'data.': 43, 'create': 44, 'pattern': 45, 'idea': 46, 'computational': 47, 'that': 48, 'called': 49, 'spirits': 50}\n", - "they\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ - "vocab = set(raw_text)\n", + "from torch.utils.data import Dataset, DataLoader\n", "\n", + "class ContextDataset(Dataset):\n", + " \n", + " def __init__(self, tokenized_texts, context_size=2):\n", + " super().__init__()\n", + " # shifted by 2 due to special tokens for padding and unknown tokens\n", + " self.word_to_ix = {}\n", + " self.word_to_ix[''] = 0\n", + " self.word_to_ix[''] = 1\n", + " for text in tokenized_texts:\n", + " self.add_to_vocab(text)\n", + " self.context_size = context_size\n", + " self.contexts = []\n", + " self.targets = []\n", + " for text in tokenized_texts:\n", + " self.add_to_context_and_target(text)\n", + " \n", + " def add_to_vocab(self, text):\n", + " for word in text:\n", + " if word not in self.word_to_ix.keys():\n", + " self.word_to_ix[word] = len(self.word_to_ix)\n", + " \n", + " def add_to_context_and_target(self, text):\n", + " # k words to the left and k to the right\n", + " k = self.context_size\n", + " for i in range(len(text)):\n", + " context = [text[i+j] if 0 <= i+j < len(text) else '' for j in range(-k, k+1) if j != 0]\n", + " target = text[i]\n", + " self.contexts.append(self.get_words_ids(context))\n", + " self.targets.append(self.get_word_id(target))\n", + " \n", + " def get_word_id(self, word):\n", + " if word in self.word_to_ix.keys():\n", + " return self.word_to_ix[word]\n", + " return self.word_to_ix['']\n", "\n", - "# shifted by 2 due to special tokens for padding and unknown tokens\n", - "word_to_ix = {}\n", - "word_to_ix[''] = 0\n", - "word_to_ix[''] = 1\n", - "for i, word in enumerate(vocab):\n", - " word_to_ix[word] = i + 2\n", - "ix_to_word = list(word_to_ix.keys())\n", - "vocab_size = len(word_to_ix)\n", - "\n", - "print(vocab_size)\n", - "print(word_to_ix)\n", - "print(ix_to_word[2])" + " def get_words_ids(self, words):\n", + " return [self.get_word_id(w) for w in words]\n", + " \n", + " @property\n", + " def ix_to_word(self):\n", + " return list(self.word_to_ix.keys())\n", + " \n", + " @property\n", + " def vocab_size(self):\n", + " return len(self.word_to_ix)\n", + " \n", + " def __getitem__(self, idx):\n", + " context = torch.tensor(self.contexts[idx], dtype=torch.long)\n", + " target = torch.tensor(self.targets[idx], dtype=torch.long)\n", + " return context, target\n", + " \n", + " def __len__(self):\n", + " return len(self.contexts)\n" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - 
"name": "stdout", - "output_type": "stream", - "text": [ - "(['We', 'are', 'to', 'study'], 'about')\n", - "(['are', 'about', 'study', 'the'], 'to')\n", - "(['about', 'to', 'the', 'idea'], 'study')\n" - ] - } - ], + "outputs": [], "source": [ - "context_size = 2 # 2 words to the left and 2 to the right\n", - "data = []\n", - "for i in range(context_size, len(raw_text) - context_size):\n", - " context = [raw_text[i + j] for j in range(- context_size, context_size + 1) if j != 0]\n", - " target = raw_text[i]\n", - " data.append((context, target))\n", + "raw_texts = [\n", + " \"we are about to study the idea of a computational process .\",\n", + " \"computational processes are abstract beings that inhabit computers .\",\n", + " \"as they evolve, processes manipulate other abstract things called data .\",\n", + " \"the evolution of a process is directed by a pattern of rules called a program .\",\n", + " \"people create programs to direct processes .\", \n", + " \"in effect , we conjure the spirits of the computer with our spells .\"\n", + "]\n", + "tokenized_texts = [text.lower().split() for text in raw_texts]\n", "\n", - "print(data[0])\n", - "print(data[1])\n", - "print(data[2])" + "train_dataset = ContextDataset(tokenized_texts, context_size=2)\n", + "train_dataloader = DataLoader(train_dataset, batch_size=4, shuffle=True)\n", + "vocab = train_dataset.word_to_ix\n", + "print('Dataset size:', len(train_dataset))\n", + "print('Vocab size:', train_dataset.vocab_size)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Model" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "def get_list_of_ids(context, word_to_ix):\n", - " list_of_ids = []\n", - " for w in context:\n", - " if w in word_to_ix:\n", - " list_of_ids.append(word_to_ix[w])\n", - " else:\n", - " list_of_ids.append(1) # unknown id = 1\n", - " return list_of_ids\n", + "emb_size = 2\n", + "lr = 0.1\n", "\n", + "model = CBOW(train_dataset.vocab_size, emb_size)\n", + "loss_function = nn.NLLLoss()\n", + "optimizer = torch.optim.SGD(model.parameters(), lr=lr)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Training loop" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def train_model(model, dataloader, optimizer, loss_function, num_epochs=1):\n", + " model.train()\n", + " losses = []\n", + " for epoch in range(1, num_epochs+1):\n", + " print('Starting epoch %d' % epoch)\n", + " total_loss = 0\n", + " hits = 0\n", + " for batch_x, batch_y in train_dataloader:\n", + " optimizer.zero_grad()\n", + " logits = model(batch_x)\n", + " loss = loss_function(logits, batch_y)\n", + " loss.backward()\n", + " optimizer.step()\n", "\n", - "def get_target_id(target, word_to_ix):\n", - " target_word_id = 0\n", - " if target in word_to_ix:\n", - " target_word_id = word_to_ix[target]\n", - " return target_word_id\n", + " loss_value = loss.item()\n", + " total_loss += loss_value\n", + " losses.append(loss_value)\n", + " y_pred = logits.argmax(dim=1)\n", + " hits += torch.sum(y_pred == batch_y).item()\n", + " avg_loss = total_loss / len(train_dataloader.dataset)\n", + " print('Epoch loss: %.4f' % avg_loss)\n", + " acc = hits / len(train_dataloader.dataset)\n", + " print('Epoch accuracy: %.4f' % acc)\n", + " print('Done!')\n", + " return losses" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + 
"source": [ + "losses = train_model(model, train_dataloader, optimizer, loss_function, num_epochs=10)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from matplotlib import pyplot as plt\n", + "fig, ax = plt.subplots()\n", + "ax.plot(losses, \".\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plot vectors\n", "\n", + "Since we mapped words to 2D vectors, we can actually plot them. In the real world, however, we would use much larger vector dimensionalities, so we would need some sort of dimensionality reduction algorithm to see a plot like this." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def get_vector(w):\n", + " return model.word_emb(torch.tensor(vocab[w]))\n", "\n", - "def make_context_vector(context, word_to_ix):\n", - " idxs = get_list_of_ids(context, word_to_ix)\n", - " return torch.tensor(idxs, dtype=torch.long)" + "with torch.no_grad():\n", + " fig, ax = plt.subplots(figsize=(12, 8))\n", + " for w in train_dataset.word_to_ix:\n", + " vec = get_vector(w)\n", + " ax.plot(vec[0], vec[1], 'k.')\n", + " ax.annotate(w, (vec[0], vec[1]), textcoords=\"offset points\", xytext=(0, 5), ha='center')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Finding closest words" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def closest(word, n=10):\n", + " vec = get_vector(word)\n", + " all_dists = [(w, torch.dist(vec, get_vector(w)).item()) for w in vocab.keys()]\n", + " return sorted(all_dists, key=lambda t: t[1])[:n]" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[33, 15, 9, 3]\n", - "4\n", - "tensor([33, 15, 9, 3])\n" - ] - } - ], + "outputs": [], "source": [ - "print(get_list_of_ids(data[0][0], word_to_ix))\n", - "print(get_target_id(data[0][1], word_to_ix))\n", - "print(make_context_vector(data[0][0], word_to_ix))" + "closest('program', n=10)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Exercise\n", + "\n", + "Try to implement the SkipGram approach." ] }, { @@ -239,7 +352,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -253,7 +366,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/README.md b/README.md index 87ae473..0dc9e12 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,10 @@ +An introductory course for PyTorch. + +Throughout this course we will be using: +- Python 3.6+. +- PyTorch 1.11.0 + + # Installation First, clone this repository using `git`: @@ -7,9 +14,7 @@ git clone https://github.com/mtreviso/pytorch-lecture.git cd pytorch-lecture ``` -It is highly recommended that you work inside a Python virtualenv. -Note that in this lecture we will be using Python 3.6+. -You can create a virtualenv and install all dependencies via: +It is highly recommended that you work inside a Python virtualenv. 
You can create one and install all dependencies via: ```sh python3 -m venv env source env/bin/activate @@ -18,7 +23,7 @@ pip3 install -r requirements.txt Run Jupyter: ```sh -jupyter notebook +jupyter-notebook ``` After running the command above, your browser will automatically open the Jupyter homepage: `http://localhost:8888/tree`. diff --git a/bonus-computational-efficiency.ipynb b/bonus-computational-efficiency.ipynb index 43d0dec..0f63b23 100644 --- a/bonus-computational-efficiency.ipynb +++ b/bonus-computational-efficiency.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -22,21 +22,9 @@ }, { "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "v1 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99]\n", - "\n", - "v2 = [100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199]\n", - "\n", - "v1 dot v2 = 823350\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "def array_dot_product(v1, v2):\n", " dot_product = 0\n", @@ -65,17 +53,9 @@ }, { "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "7.52 µs ± 80 ns per loop (mean ± std. dev. 
of 7 runs, 100000 loops each)\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "%timeit array_dot_product(v1, v2)" ] @@ -89,30 +69,9 @@ }, { "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "v1: [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23\n", - " 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47\n", - " 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71\n", - " 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95\n", - " 96 97 98 99]\n", - "\n", - "v2: [100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117\n", - " 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135\n", - " 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153\n", - " 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171\n", - " 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189\n", - " 190 191 192 193 194 195 196 197 198 199]\n", - "\n", - "v1 dot v2 = 823350\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "v1_np = np.arange(100)\n", "v2_np = np.arange(100, 200)\n", @@ -132,17 +91,9 @@ }, { "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "765 ns ± 3.69 ns per loop (mean ± std. dev. of 7 runs, 1000000 loops each)\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "%timeit v1_np.dot(v2_np)" ] @@ -156,7 +107,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -180,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -203,26 +154,9 @@ }, { "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[ True, True, True, ..., True, True, True],\n", - " [ True, True, True, ..., False, True, False],\n", - " [False, True, True, ..., False, True, True],\n", - " ...,\n", - " [ True, True, True, ..., True, False, False],\n", - " [False, False, False, ..., True, True, False],\n", - " [False, False, False, ..., False, True, True]])" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "result_list = np.array(result_list)\n", "result_list == result_numpy" @@ -237,52 +171,18 @@ }, { "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", - " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", - " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", - " -7.10542736e-15, 0.00000000e+00, 7.10542736e-15],\n", - " [-7.10542736e-15, 0.00000000e+00, 0.00000000e+00, ...,\n", - " -7.10542736e-15, 0.00000000e+00, 0.00000000e+00],\n", - " ...,\n", - " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", - " 0.00000000e+00, 7.10542736e-15, -7.10542736e-15],\n", - " [-7.10542736e-15, -7.10542736e-15, 7.10542736e-15, ...,\n", - " 0.00000000e+00, 0.00000000e+00, -7.10542736e-15],\n", - " [ 3.55271368e-14, -1.42108547e-14, -2.84217094e-14, ...,\n", - " -1.42108547e-14, 0.00000000e+00, 0.00000000e+00]])" - ] - }, 
- "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "result_list - result_numpy" ] }, { "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "9.776357501323218e-11" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "np.abs(result_list - result_numpy).sum()" ] @@ -296,65 +196,147 @@ }, { "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "706 ms ± 2.84 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "%timeit matrix_dot_product(m1_list, m2_t_list)" ] }, { "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "176 µs ± 7.43 µs per loop (mean ± std. dev. of 7 runs, 10000 loops each)\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "%timeit m1.dot(m2)" ] }, { "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "3509433.962264151" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "time1 = 747-3\n", - "time2 = 212e-6\n", - "time1 / time2" + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "time1 = 647e-3\n", + "time2 = 215e-6\n", + "print('Numpy is ~{:.0f}x faster than standard python'.format(time1 / time2))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Enters PyTorch" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m1_pt = torch.from_numpy(m1)\n", + "m2_pt = torch.from_numpy(m2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%timeit m1_pt @ m2_pt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Seems about the same... Now let's try to use a GPU:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "m1_pt = m1_pt.to('cuda')\n", + "m2_pt = m2_pt.to('cuda')\n", + "%timeit m1_pt @ m2_pt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Can we make make things more efficient? Enters **JIT**." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@torch.jit.script\n", + "def jit_mm(m1, m2):\n", + " return m1 @ m2\n", + "\n", + "%timeit jit_mm(m1_pt, m2_pt)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%timeit jit_mm(m1_pt, m2_pt)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "traced_mm = torch.jit.trace(jit_mm, (torch.rand(2,2), torch.rand(2,2)))\n", + "\n", + "%timeit traced_mm(m1_pt, m2_pt)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%timeit traced_mm(m1_pt, m2_pt)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -368,7 +350,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.8" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/challenges-for-true-pytorch-heroes.ipynb b/challenges-for-true-pytorch-heroes.ipynb new file mode 100644 index 0000000..705ee7f --- /dev/null +++ b/challenges-for-true-pytorch-heroes.ipynb @@ -0,0 +1,817 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PyTorch Challenges\n", + "\n", + "[Sasha Rush](https://twitter.com/srush_nlp) compiled a set of [16 Tensor mini-puzzles](https://github.com/srush/Tensor-Puzzles) that involve reasoning about broadcasting in a constrained setting: people are allowed to use only a single PyTorch function: `torch.arange`. Can you do it?\n", + "\n", + "Here, I've extended his list to 27 puzzles! \n", + "\n", + "**Rules**\n", + "\n", + "- Each puzzle needs to be solved in 1 line (<80 columns) of code.\n", + "- You are allowed @, arithmetic, comparison, shape, any indexing (e.g. `a[:j], a[:, None], a[arange(10)]`), and previous puzzle functions.\n", + "- To start off, we give you an implementation for the `torch.arange` function.\n", + "\n", + "**Anti-Rules**\n", + "- Nothing else. No `.view, .sum, .take, .squeeze, .tensor`.\n", + "- No cheating. Stackoverflow is great, but this is about first-principles.\n", + "- Hint... these puzzles are mostly about [Broadcasting](https://pytorch.org/docs/master/notes/broadcasting.html). Make sure you understand this rule, which is a key concept for dealing with n-dimensional arrays.\n", + "\n", + "🐶🐶🐶 After you convince yourself your code is correct, run the cell to test it. If the test succeeds, you will get a puppy 🐶🐶🐶." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "List of puzzles:\n", + "\n", + "1. [where](#1\\)-where)\n", + "2. [ones](#2\\)-ones)\n", + "3. [sum](#3\\)-sum)\n", + "4. [outer](#4\\)-outer)\n", + "5. [diag](#5\\)-diag)\n", + "6. [eye](#6\\)-eye)\n", + "7. [triu](#7\\)-triu)\n", + "8. [cumsum](#8\\)-cumsum)\n", + "9. [diff](#9\\)-diff)\n", + "10. [vstack](#10\\)-vstack)\n", + "11. [roll](#11\\)-roll)\n", + "12. [flip](#12\\)-flip)\n", + "13. [compress](#13\\)-compress)\n", + "14. [pad_to](#14\\)-pad_to)\n", + "15. [sequence_mask](#15\\)-sequence_mask)\n", + "16. [bincount](#16\\)-bincount)\n", + "17. [scatter_add](#17\\)-scatter_add)\n", + "18. [flatten](#18\\)-flatten)\n", + "19. [linspace](#19\\)-linspace)\n", + "20. 
[heaviside](#20\\)-heaviside)\n", + "21. [hstack](#21\\)-hstack)\n", + "22. [view](#22\\)-view-\\(1d-to-2d\\))\n", + "23. [repeat](#23\\)-repeat-\\(1d\\))\n", + "24. [repeat_interleave](#24\\)-repeat_interleave-\\(1d\\))\n", + "25. [chunk](#25\\)-chunk)\n", + "26. [nonzero](#26\\)-nonzero)\n", + "27. [bucketize](#27\\)-bucketize)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install -qqq torchtyping hypothesis pytest" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from spec import make_test, run_test, TT" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### arange\n", + "\n", + "This one is given! Think about it as a \"for-loop\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def arange(i: int):\n", + " return torch.arange(i)\n", + "\n", + "arange(6)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1) where\n", + "https://numpy.org/doc/stable/reference/generated/numpy.where.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def where_spec(q, a, b, out):\n", + " for i in range(len(out)):\n", + " out[i] = a[i] if q[i] else b[i]\n", + "\n", + "def where(q: TT[\"i\", bool], a: TT[\"i\"], b: TT[\"i\"]) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"where\", where, where_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2) ones\n", + "https://numpy.org/doc/stable/reference/generated/numpy.ones.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def ones_spec(out):\n", + " for i in range(len(out)):\n", + " out[i] = 1\n", + "\n", + "def ones(i: int) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"one\", ones, ones_spec, add_sizes=[\"i\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 3) sum\n", + "https://numpy.org/doc/stable/reference/generated/numpy.sum.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def sum_spec(a, out):\n", + " out[0] = 0\n", + " for i in range(len(a)):\n", + " out[0] += a[i]\n", + "\n", + "def sum(a: TT[\"i\"]) -> TT[1]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"sum\", sum, sum_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 4) outer\n", + "https://numpy.org/doc/stable/reference/generated/numpy.outer.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def outer_spec(a, b, out):\n", + " for i in range(len(out)):\n", + " for j in range(len(out[0])):\n", + " out[i][j] = a[i] * b[j]\n", + "\n", + "def outer(a: TT[\"i\"], b: TT[\"j\"]) -> TT[\"i\", \"j\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"outer\", outer, outer_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 5) diag\n", + "https://numpy.org/doc/stable/reference/generated/numpy.diag.html" + ] + }, + { + 
"cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def diag_spec(a, out):\n", + " for i in range(len(a)):\n", + " out[i] = a[i][i]\n", + " \n", + "def diag(a: TT[\"i\", \"i\"]) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"diag\", diag, diag_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 6) eye\n", + "https://numpy.org/doc/stable/reference/generated/numpy.eye.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def eye_spec(out):\n", + " for i in range(len(out)):\n", + " out[i][i] = 1\n", + " \n", + "def eye(j: int) -> TT[\"j\", \"j\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"eye\", eye, eye_spec, add_sizes=[\"j\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 7) triu\n", + "https://numpy.org/doc/stable/reference/generated/numpy.triu.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def triu_spec(out):\n", + " for i in range(len(out)):\n", + " for j in range(len(out)):\n", + " if i <= j:\n", + " out[i][j] = 1\n", + " else:\n", + " out[i][j] = 0\n", + " \n", + "def triu(j: int) -> TT[\"j\", \"j\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"triu\", triu, triu_spec, add_sizes=[\"j\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 8) cumsum\n", + "https://numpy.org/doc/stable/reference/generated/numpy.cumsum.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def cumsum_spec(a, out):\n", + " total = 0\n", + " for i in range(len(out)):\n", + " out[i] = total + a[i]\n", + " total += a[i]\n", + "\n", + "def cumsum(a: TT[\"i\"]) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"cumsum\", cumsum, cumsum_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 9) diff\n", + "https://numpy.org/doc/stable/reference/generated/numpy.diff.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def diff_spec(a, out):\n", + " out[0] = a[0]\n", + " for i in range(1, len(out)):\n", + " out[i] = a[i] - a[i - 1]\n", + "\n", + "def diff(a: TT[\"i\"], i: int) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"diff\", diff, diff_spec, add_sizes=[\"i\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 10) vstack\n", + "https://numpy.org/doc/stable/reference/generated/numpy.vstack.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def vstack_spec(a, b, out):\n", + " for i in range(len(out[0])):\n", + " out[0][i] = a[i]\n", + " out[1][i] = b[i]\n", + "\n", + "def vstack(a: TT[\"i\"], b: TT[\"i\"]) -> TT[2, \"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"vstack\", vstack, vstack_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 11) roll\n", + "https://numpy.org/doc/stable/reference/generated/numpy.roll.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def roll_spec(a, out):\n", + " for i in range(len(out)):\n", + " if i + 1 < len(out):\n", + " out[i] = a[i + 1]\n", + 
" else:\n", + " out[i] = a[i + 1 - len(out)]\n", + " \n", + "def roll(a: TT[\"i\"], i: int) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"roll\", roll, roll_spec, add_sizes=[\"i\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 12) flip\n", + "https://numpy.org/doc/stable/reference/generated/numpy.flip.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def flip_spec(a, out):\n", + " for i in range(len(out)):\n", + " out[i] = a[len(out) - i - 1]\n", + " \n", + "def flip(a: TT[\"i\"], i: int) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"flip\", flip, flip_spec, add_sizes=[\"i\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 13) compress\n", + "https://numpy.org/doc/stable/reference/generated/numpy.compress.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def compress_spec(g, v, out):\n", + " j = 0\n", + " for i in range(len(g)):\n", + " if g[i]:\n", + " out[j] = v[i]\n", + " j += 1\n", + " \n", + "def compress(g: TT[\"i\", bool], v: TT[\"i\"], i:int) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"compress\", compress, compress_spec, add_sizes=[\"i\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 14) pad_to\n", + "\n", + "https://pytorch.org/docs/stable/generated/torch.nn.utils.rnn.pad_sequence.html?highlight=pad#torch.nn.utils.rnn.pad_sequence" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def pad_to_spec(a, out):\n", + " for i in range(min(len(out), len(a))):\n", + " out[i] = a[i]\n", + "\n", + "def pad_to(a: TT[\"i\"], i: int, j: int) -> TT[\"j\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"pad_to\", pad_to, pad_to_spec, add_sizes=[\"i\", \"j\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 15) sequence_mask\n", + "https://www.tensorflow.org/api_docs/python/tf/sequence_mask" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def sequence_mask_spec(values, length, out):\n", + " for i in range(len(out)):\n", + " for j in range(len(out[0])):\n", + " if j < length[i]:\n", + " out[i][j] = values[i][j]\n", + " else:\n", + " out[i][j] = 0\n", + "\n", + "def constraint_set_length(d, sizes=None):\n", + " d[\"length\"] = d[\"length\"] % d[\"values\"].shape[1]\n", + " return d\n", + " \n", + "def sequence_mask(values: TT[\"i\", \"j\"], length: TT[\"i\", int]) -> TT[\"i\", \"j\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"sequence_mask\",\n", + " sequence_mask, sequence_mask_spec, constraint=constraint_set_length\n", + "))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 16) bincount\n", + "https://numpy.org/doc/stable/reference/generated/numpy.bincount.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def bincount_spec(a, out):\n", + " for i in range(len(a)):\n", + " out[a[i]] += 1\n", + " \n", + "def constraint_set_max(d, sizes=None):\n", + " d[\"a\"] = d[\"a\"] % d[\"return\"].shape[0]\n", + " return d\n", + " \n", + "def bincount(a: TT[\"i\"], j: int) -> TT[\"j\"]:\n", + " raise NotImplementedError\n", + "\n", + 
"run_test(make_test(\"bincount\",\n", + " bincount, bincount_spec, add_sizes=[\"j\"], constraint=constraint_set_max\n", + "))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 17) scatter_add\n", + "https://pytorch-scatter.readthedocs.io/en/1.3.0/functions/add.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def scatter_add_spec(values, link, out):\n", + " for j in range(len(values)):\n", + " out[link[j]] += values[j]\n", + "\n", + "def constraint_set_max(d, sizes=None):\n", + " d[\"link\"] = d[\"link\"] % d[\"return\"].shape[0]\n", + " return d\n", + "\n", + "def scatter_add(values: TT[\"i\"], link: TT[\"i\"], j: int) -> TT[\"j\"]:\n", + " raise NotImplementedError\n", + "\n", + "\n", + "run_test(make_test(\"scatter_add\",\n", + " scatter_add, scatter_add_spec, add_sizes=[\"j\"], constraint=constraint_set_max\n", + "))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 18) flatten\n", + "\n", + "https://numpy.org/doc/stable/reference/generated/numpy.ndarray.flatten.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def flatten_spec(a, out):\n", + " k = 0\n", + " for i in range(len(a)):\n", + " for j in range(len(a[0])):\n", + " out[k] = a[i][j]\n", + " k += 1\n", + "\n", + "def flatten(a: TT[\"i\", \"j\"], i:int, j:int) -> TT[\"i * j\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"flatten\", flatten, flatten_spec, add_sizes=[\"i\", \"j\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 19) linspace\n", + "\n", + "https://numpy.org/doc/stable/reference/generated/numpy.linspace.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def linspace_spec(i, j, out):\n", + " for k in range(len(out)):\n", + " out[k] = float(i + (j - i) * k / max(1, len(out) - 1))\n", + "\n", + "def linspace(i: TT[1], j: TT[1], n: int) -> TT[\"n\", float]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"linspace\", linspace, linspace_spec, add_sizes=[\"n\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 20) heaviside\n", + "\n", + "https://numpy.org/doc/stable/reference/generated/numpy.heaviside.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def heaviside_spec(a, b, out):\n", + " for k in range(len(out)):\n", + " if a[k] == 0:\n", + " out[k] = b[k]\n", + " else:\n", + " out[k] = int(a[k] > 0)\n", + "\n", + "def heaviside(a: TT[\"i\"], b: TT[\"i\"]) -> TT[\"i\"]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"heaviside\", heaviside, heaviside_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 21) hstack\n", + "\n", + "https://numpy.org/doc/stable/reference/generated/numpy.hstack.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def hstack_spec(a, b, out):\n", + " for i in range(len(out)):\n", + " out[i][0] = a[i]\n", + " out[i][1] = b[i]\n", + " \n", + "def hstack(a: TT[\"i\"], b: TT[\"i\"]) -> TT[\"i\", 2]:\n", + " raise NotImplementedError\n", + "\n", + "run_test(make_test(\"hstack\", hstack, hstack_spec))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "\n", + "No more puppies from now on... 
For now, check with the examples shown in the docs." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 22) view (1d to 2d)\n", + "\n", + "https://pytorch.org/docs/stable/generated/torch.Tensor.view.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def view(a: TT[\"i * j\"], i: int, j: int) -> TT[\"i\", \"j\"]:\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 23) repeat (1d)\n", + "\n", + "https://pytorch.org/docs/stable/generated/torch.Tensor.repeat.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def repeat(a: TT[\"i\"], d: int) -> TT[\"d\"]:\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 24) repeat_interleave (1d)\n", + "\n", + "https://pytorch.org/docs/stable/generated/torch.repeat_interleave.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def repeat_interleave(a: TT[\"i\"], d: int) -> TT[\"d\"]:\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 25) chunk\n", + "https://pytorch.org/docs/stable/generated/torch.chunk.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def chunk(a: TT[\"i\"], c: int) -> TT[\"c\", \"i // c\"]:\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 26) nonzero\n", + "https://pytorch.org/docs/stable/generated/torch.nonzero.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def nonzero(a: TT[\"i\",\"j\"], i: int, j: int) -> TT[\"k\", 2]:\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 27) bucketize\n", + "https://pytorch.org/docs/stable/generated/torch.bucketize.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def bucketize(v: TT[\"i\"], boundaries: TT[\"j\"]) -> TT[\"i\"]:\n", + " raise NotImplementedError" + ] + } + ], + "metadata": { + "anaconda-cloud": {}, + "celltoolbar": "Raw Cell Format", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/requirements.txt b/requirements.txt index 58ba14d..2f25bf0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,13 @@ +celluloid ipython +ipdb jupyter matplotlib numpy scikit-learn +scipy +seaborn +pandas +Pillow torch torchvision diff --git a/spec.py b/spec.py new file mode 100644 index 0000000..6ddbc31 --- /dev/null +++ b/spec.py @@ -0,0 +1,182 @@ +import torch +from torchtyping import TensorType as TT +from hypothesis.extra.numpy import arrays +from hypothesis.strategies import integers, lists, composite, floats +from hypothesis import given +import numpy as np +import random +import sys +import typing + +tensor = torch.tensor + +numpy_to_torch_dtype_dict = { + bool: torch.bool, 
+ np.uint8: torch.uint8, + np.int8: torch.int8, + np.int16: torch.int16, + np.int32: torch.int32, + np.int64: torch.int64, + np.float16: torch.float16, + np.float32: torch.float32, + np.float64: torch.float64, +} +torch_to_numpy_dtype_dict = {v: k for k, v in numpy_to_torch_dtype_dict.items()} + + +@composite +def spec(draw, x, min_size=1): + # Get the type hints. + if sys.version_info >= (3, 9): + gth = typing.get_type_hints(x, include_extras=True) + else: + gth = typing.get_type_hints(x) + + # Collect all the dimension names. + names = set() + for k in gth: + if not hasattr(gth[k], "__metadata__"): + continue + dims = gth[k].__metadata__[0]["details"][0].dims + names.update([d.name for d in dims if isinstance(d.name, str)]) + names = list(names) + + # draw sizes for each dim. + size = integers(min_value=min_size, max_value=5) + arr = draw(arrays(shape=(len(names),), unique=True, elements=size, dtype=np.int32)).tolist() + sizes = dict(zip(names, arr)) + for n in list(sizes.keys()): + if '*' in n or '+' in n or '-' in n or '//' in n: + i, op, j = n.split() + i_val = i if i.isdigit() else sizes[i] + j_val = j if j.isdigit() else sizes[j] + sizes[n] = eval('{}{}{}'.format(i_val, op,j_val)) + + # Create tensors for each size. + ret = {} + for k in gth: + if not hasattr(gth[k], "__metadata__"): + continue + shape = tuple( + [ + sizes[d.name] if isinstance(d.name, str) else d.size + for d in gth[k].__metadata__[0]["details"][0].dims + ] + ) + dtype = (torch_to_numpy_dtype_dict[ + gth[k].__metadata__[0]["details"][1].dtype + ] + if len(gth[k].__metadata__[0]["details"]) >= 2 + else int) + ret[k] = draw( + arrays( + shape=shape, + dtype=dtype, + elements=integers(min_value=-5, max_value=5) if + dtype == int else None, + unique=False + ) + ) + ret[k] = np.nan_to_num(ret[k], nan=0, neginf=0, posinf=0) + ret["return"][:] = 0 + return ret, sizes + + +def make_test(name, problem, problem_spec, add_sizes=[], constraint=lambda d, sizes: d): + examples = [] + for i in range(3): + example, sizes = spec(problem, 3).example() + example = constraint(example, sizes=sizes) + out = example["return"].tolist() + del example["return"] + problem_spec(*example.values(), out) + + for size in add_sizes: + example[size] = sizes[size] + + yours = None + try: + yours = problem(*map(tensor, example.values())) + + except AssertionError: + pass + for size in add_sizes: + del example[size] + example["target"] = tensor(out) + if yours is not None: + example["yours"] = yours + examples.append(example) + + @given(spec(problem)) + def test_problem(d): + d, sizes = d + + d = constraint(d, sizes=sizes) + out = d["return"].tolist() + del d["return"] + problem_spec(*d.values(), out) + for size in add_sizes: + d[size] = sizes[size] + + out2 = problem(*map(tensor, d.values())) + out = tensor(out) + out2 = torch.broadcast_to(out2, out.shape) + assert torch.allclose( + out, out2 + ), "Two tensors are not equal\n Spec: \n\t%s \n\t%s" % (out, out2) + + return test_problem + + +def run_test(fn): + fn() + # Generate a random puppy video if you are correct. 
+ print("Correct!") + from IPython.display import HTML + pups = [ + "2m78jPG", + "pn1e9TO", + "MQCIwzT", + "udLK6FS", + "ZNem5o3", + "DS2IZ6K", + "aydRUz8", + "MVUdQYK", + "kLvno0p", + "wScLiVz", + "Z0TII8i", + "F1SChho", + "9hRi2jN", + "lvzRF3W", + "fqHxOGI", + "1xeUYme", + "6tVqKyM", + "CCxZ6Wr", + "lMW0OPQ", + "wHVpHVG", + "Wj2PGRl", + "HlaTE8H", + "k5jALH0", + "3V37Hqr", + "Eq2uMTA", + "Vy9JShx", + "g9I2ZmK", + "Nu4RH7f", + "sWp0Dqd", + "bRKfspn", + "qawCMl5", + "2F6j2B4", + "fiJxCVA", + "pCAIlxD", + "zJx2skh", + "2Gdl1u7", + "aJJAY4c", + "ros6RLC", + "DKLBJh7", + "eyxH0Wc", + "rJEkEw4"] + return HTML(""" + + """%(random.sample(pups, 1)[0])) From 86d9a4f38076ed5ec3ea41c3bc976e6bc967a7fa Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Thu, 5 May 2022 04:57:41 +0100 Subject: [PATCH 14/18] Update notebook --- bonus-computational-efficiency.ipynb | 191 ++++++++++++++------------- 1 file changed, 99 insertions(+), 92 deletions(-) diff --git a/bonus-computational-efficiency.ipynb b/bonus-computational-efficiency.ipynb index 0f63b23..af2be26 100644 --- a/bonus-computational-efficiency.ipynb +++ b/bonus-computational-efficiency.ipynb @@ -4,11 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Numpy and Computation Efficiency\n", + "# Computation Efficiency with Numpy, PyTorch, and JIT\n", "\n", - "This notebooks illustrates the computational efficiency of running linear algebra with the proper tools - such as numpy.\n", - "\n", - "Let's compute an array dot product in Python:" + "This notebooks illustrates the computational efficiency of running linear algebra with the proper tools - such as numpy." ] }, { @@ -17,7 +15,22 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np" + "from matplotlib import pyplot as plt\n", + "\n", + "def plot_times(labels, times):\n", + " x = list(range(len(times)))\n", + " fig, ax = plt.subplots()\n", + " ax.grid(alpha=0.5, ls='--', which='both')\n", + " ax.bar(x, times, log=True)\n", + " ax.set_xticks(x, labels)\n", + " ax.set_axisbelow(True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's compute an array dot product in Python:" ] }, { @@ -27,21 +40,19 @@ "outputs": [], "source": [ "def array_dot_product(v1, v2):\n", - " dot_product = 0\n", - " \n", + " result = 0\n", " for v1_i, v2_i in zip(v1, v2):\n", - " dot_product += v1_i * v2_i\n", - " \n", - " return dot_product\n", + " result += v1_i * v2_i\n", + " return result\n", "\n", "v1 = list(range(100))\n", - "v2 = list(range(100, 200))\n", + "v2 = [1]*100\n", "\n", - "print(\"v1 = %s\\n\" % v1)\n", - "print(\"v2 = %s\\n\" % v2)\n", + "print(\"v1 = {}\".format(v1))\n", + "print(\"v2 = {}\\n\".format(v2))\n", "\n", - "result = array_dot_product(v1, v2)\n", - "print(\"v1 dot v2 = %d\" % result)" + "print(\"v1 dot v2: {}\".format(array_dot_product(v1, v2)))\n", + "print(\"1+2+...+99:\", 99*100/2)" ] }, { @@ -64,7 +75,18 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Now let's try with numpy -- it uses data structures like in C, optimized for mathematical operations, without the Python overhead." + "## Enters numpy\n", + "\n", + "Now let's try with numpy, which uses a C backend optimized for mathematical operations, alleviating the Python overhead." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np" ] }, { "cell_type": "code", @@ -74,19 +96,15 @@ "outputs": [], "source": [ "v1_np = np.arange(100)\n", - "v2_np = np.arange(100, 200)\n", - "print(\"v1: %s\\n\" % v1_np)\n", - "print(\"v2: %s\\n\" % v2_np)\n", - "\n", - "result = v1_np.dot(v2_np)\n", - "print(\"v1 dot v2 = %d\" % result)" + "v2_np = np.ones(100)\n", + "print(\"v1 dot v2: {}\".format(v1_np.dot(v2_np)))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Nice, aligned formatting. Now let's check the running time." + "Nice, aligned with our raw Python version. Now let's check the running time." ] }, { @@ -102,7 +120,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "What about matrices?" + "We can already see the difference. Numpy was roughly 6x faster than raw Python for a very small array. Now let's check with matrices." ] }, { @@ -111,21 +129,19 @@ "metadata": {}, "outputs": [], "source": [ - "def matrix_dot_product(m1, m2_t):\n", + "def matrix_mul(m1, m2):\n", " num_rows = len(m1)\n", - " num_columns = len(m2_t)\n", + " num_columns = len(m2[0])\n", " internal_dim = len(m1[0])\n", " result = []\n", - " \n", " for i in range(num_rows):\n", " new_row = []\n", " for j in range(num_columns):\n", " total = 0\n", " for k in range(internal_dim):\n", - " total += m1[i][k] * m2_t[j][k]\n", + " total += m1[i][k] * m2[k][j]\n", " new_row.append(total)\n", " result.append(new_row)\n", - " \n", " return result" ] }, @@ -135,14 +151,13 @@ "metadata": {}, "outputs": [], "source": [ - "m1 = np.random.rand(100, 200)\n", - "m2 = np.random.rand(200, 300)\n", + "m1_np = np.random.randn(100, 200)\n", + "m2_np = np.random.randn(200, 100)\n", + "m1_list = m1_np.tolist()\n", + "m2_list = m2_np.tolist()\n", "\n", - "m2_t = m2.T\n", - "m1_list = m1.tolist()\n", - "m2_t_list = m2_t.tolist()\n", - "result_list = matrix_dot_product(m1_list, m2_t_list)\n", - "result_numpy = m1.dot(m2)" + "result_raw = matrix_mul(m1_list, m2_list)\n", + "result_np = m1_np.dot(m2_np)" ] }, { @@ -158,15 +173,15 @@ "metadata": {}, "outputs": [], "source": [ - "result_list = np.array(result_list)\n", - "result_list == result_numpy" + "eps = np.abs(result_raw - result_np).sum()\n", + "print('{} up to {}'.format(np.allclose(result_raw, result_np), eps))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Different? How much?" + "Okay. Now let's time it again." ] }, { @@ -175,7 +190,7 @@ "metadata": {}, "outputs": [], "source": [ - "result_list - result_numpy" + "time_raw = %timeit -o matrix_mul(m1_list, m2_list) " ] }, { @@ -184,23 +199,20 @@ "metadata": {}, "outputs": [], "source": [ - "np.abs(result_list - result_numpy).sum()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Okay. Now lets time it again." 
+ "time_np = %timeit -o m1_np.dot(m2_np)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "scrolled": true + }, "outputs": [], "source": [ - "%timeit matrix_dot_product(m1_list, m2_t_list)" + "time_ratio = time_raw.average / time_np.average\n", + "print('Numpy is ~{:.0f}x faster than standard python'.format(time_ratio))\n", + "print('Something the runs in 1h in numpy would need to run for {:.0f} days in raw python'.format(time_ratio / 24))" ] }, { @@ -209,25 +221,25 @@ "metadata": {}, "outputs": [], "source": [ - "%timeit m1.dot(m2)" + "plot_times(['python', 'numpy'], [time_raw.average, time_np.average])" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "time1 = 647e-3\n", - "time2 = 215e-6\n", - "print('Numpy is ~{:.0f}x faster than standard python'.format(time1 / time2))" + "## Enters PyTorch\n", + "\n", + "Now let's try with PyTorch. Note that PyTorch also uses a C-backend to implement linear algebra methods. However, it also has the power to run those operation on GPUs. Let's try both variants and compare them." ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "## Enters PyTorch" + "import torch" ] }, { @@ -236,7 +248,8 @@ "metadata": {}, "outputs": [], "source": [ - "import torch" + "m1_pt = torch.from_numpy(m1_np)\n", + "m2_pt = torch.from_numpy(m2_np)" ] }, { @@ -245,8 +258,7 @@ "metadata": {}, "outputs": [], "source": [ - "m1_pt = torch.from_numpy(m1)\n", - "m2_pt = torch.from_numpy(m2)" + "time_pt = %timeit -o m1_pt @ m2_pt" ] }, { @@ -255,7 +267,8 @@ "metadata": {}, "outputs": [], "source": [ - "%timeit m1_pt @ m2_pt" + "plot_times(['python', 'numpy', 'pytorch'], \n", + " [time_raw.average, time_np.average, time_pt.average])" ] }, { @@ -271,16 +284,9 @@ "metadata": {}, "outputs": [], "source": [ - "m1_pt = m1_pt.to('cuda')\n", - "m2_pt = m2_pt.to('cuda')\n", - "%timeit m1_pt @ m2_pt" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Can we make make things more efficient? Enters **JIT**." + "m1_pt = m1_pt.to('cuda' if torch.cuda.is_available() else 'cpu')\n", + "m2_pt = m2_pt.to('cuda' if torch.cuda.is_available() else 'cpu')\n", + "time_pt_gpu = %timeit -o m1_pt @ m2_pt" ] }, { @@ -289,31 +295,24 @@ "metadata": {}, "outputs": [], "source": [ - "@torch.jit.script\n", - "def jit_mm(m1, m2):\n", - " return m1 @ m2\n", - "\n", - "%timeit jit_mm(m1_pt, m2_pt)" + "plot_times(['numpy', 'pytorch (cpu)', 'pytorch (gpu)'], \n", + " [time_np.average, time_pt.average, time_pt_gpu.average])" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "%timeit jit_mm(m1_pt, m2_pt)" + "## Enters JIT" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "traced_mm = torch.jit.trace(jit_mm, (torch.rand(2,2), torch.rand(2,2)))\n", + "Now suppose we have an even more complicated function that contains control flows (if-else statements). To handle that, we have to rely on the Python interpreter, which is slow. To circumvent that, we can \"compile\" our function/module into a fixed intermediate-level code representation. 
\n", "\n", - "%timeit traced_mm(m1_pt, m2_pt)" + "https://pytorch.org/docs/stable/jit.html" ] }, { @@ -322,15 +321,23 @@ "metadata": {}, "outputs": [], "source": [ - "%timeit traced_mm(m1_pt, m2_pt)" + "@torch.jit.script\n", + "def jit_mm(m1, m2):\n", + " return m1 @ m2\n", + "\n", + "time_pt_jit = %timeit -o jit_mm(m1_pt, m2_pt)\n", + "\n", + "plot_times(['numpy', 'pt (cpu)', 'pt (gpu)', 'pt (gpu+jit)'], \n", + " [time_np.average, time_pt.average, time_pt_gpu.average, time_pt_jit.average])" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], - "source": [] + "source": [ + "For more optimizations, check this blog post by Horace He:\n", + "[Making Deep Learning Go Brrrr From First Principles](https://horace.io/brrr_intro.html)" + ] } ], "metadata": { From 8ea3f9745fdf7e19e44e2c807f609ef3d9918b02 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Thu, 5 May 2022 05:19:09 +0100 Subject: [PATCH 15/18] Update README.md --- README.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/README.md b/README.md index 0dc9e12..da1d114 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,22 @@ Throughout this course we will be using: - PyTorch 1.11.0 +# Lectures + +[Lecture 0](https://github.com/mtreviso/pytorch-lecture/blob/master/00-intro.ipynb): Hello world, introduction to Jupyter, and PyTorch high-level overview +
+[Lecture 1](https://github.com/mtreviso/pytorch-lecture/blob/master/01-pytorch-basics.ipynb): Introduction to PyTorch: tensors, tensor operations, gradients, autodiff, and broadcasting +
+[Lecture 2](https://github.com/mtreviso/pytorch-lecture/blob/master/02-linear-regression.ipynb): Linear Regression via Gradient Descent using Numpy, Numpy + Autodiff, and PyTorch +
+[Lecture 3](https://github.com/mtreviso/pytorch-lecture/blob/master/03-modules-and-mlps.ipynb): PyTorch `nn.Modules` alongside training and evaluation loop +
+[Lecture 4](https://github.com/mtreviso/pytorch-lecture/blob/master/04-optional-word2vec.ipynb): Implementation of a proof-of-concept Word2Vec in PyTorch
+⏳⏳⏳ [Bonus](https://github.com/mtreviso/pytorch-lecture/blob/master/bonus-computational-efficiency.ipynb): Comparison of the computation efficiency between raw Python, Numpy, and PyTorch (with and without JIT) +
+🔥🔥🔥 [PyTorch Challenges](https://github.com/mtreviso/pytorch-lecture/blob/master/challenges-for-true-pytorch-heroes.ipynb): a set of 27 mini-puzzles 🧩 (extension of the ones proposed by [Sasha Rush](https://github.com/srush/Tensor-Puzzles)) + + # Installation First, clone this repository using `git`: @@ -27,3 +43,6 @@ jupyter-notebook ``` After running the command above, your browser will automatically open the Jupyter homepage: `http://localhost:8888/tree`. + + + From 54ace1e19eb1a5ea8f5a51325e9122623eea21a7 Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Sun, 8 May 2022 06:57:23 +0100 Subject: [PATCH 16/18] Add more notebooks --- broadcasting_real_examples.ipynb | 1649 +++++++++++++++++ ...es-for-true-pytorch-heroes-solutions.ipynb | 950 ++++++++++ challenges-for-true-pytorch-heroes.ipynb | 3 + 3 files changed, 2602 insertions(+) create mode 100644 broadcasting_real_examples.ipynb create mode 100644 challenges-for-true-pytorch-heroes-solutions.ipynb diff --git a/broadcasting_real_examples.ipynb b/broadcasting_real_examples.ipynb new file mode 100644 index 0000000..b580020 --- /dev/null +++ b/broadcasting_real_examples.ipynb @@ -0,0 +1,1649 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "e5b1ca63", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: CUDA_VISIBLE_DEVICES=0\n" + ] + } + ], + "source": [ + "%env CUDA_VISIBLE_DEVICES=0" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "99af0cce", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from matplotlib import pyplot as plt" + ] + }, + { + "cell_type": "markdown", + "id": "bfdb8648", + "metadata": {}, + "source": [ + "# From Puzzles to Real Code\n", + "\n", + "All of these puzzles induces you to think about a smart way of using broadcast rules. But turns out that broadcasting is not only useful to solve \"puzzles\". To illustrate this better, here I'm going to show you two code snippets that I took from my recent research projects. For both problems, I've used broadcasting to write an optimized version of standard PyTorch functions. \n", + "\n", + "Since in real problems we usually have tensors with a batch dimension to leverage GPUs, these optimizations have to deal with the `batch` dimension. Note also that contrary to some problems, the ones covered here cannot be solved via reshaping since each sequence in the batch is an independent example." + ] + }, + { + "cell_type": "markdown", + "id": "4eeb3637", + "metadata": {}, + "source": [ + "# Aggregating word pieces\n", + "\n", + "When we tokenize texts into word pieces (e.g., BPEs), we end up splitting not only one word from another, but also pieces of the word itself. For example:\n", + "\n", + "```python\n", + ">>> wordpiece_tokenize(\"Welcome to the jungle\")\n", + "[\"_Wel\", \"come\", \"_to\", \"_the\", \"_jungle\"]\n", + "```\n", + "\n", + "The symbol `_` represents the first piece of a tokenized word. Word piece tokenization has some advantages such as limiting the size of the vocabulary. However, consider a word labelling problem where each word is associated with a label, such as POS tagging or NER. A direct consequence of using word piece tokenization is that the number of \"tokens\" $m$ becomes larger than the actual number of words/labels $n$. 
Therefore, we need to **map** the tokenized pieces back to their actual words, such that $m = n$ again.\n", + "\n", + "A simple way to solve this problem is following the strategy adopted by BERT: we only select the information from the first word piece. For example:\n", + "\n", + "```python\n", + ">>> map_pieces_to_words([\"_Wel\", \"come\", \"_to\", \"_the\", \"_jungle\"])\n", + "[\"_Wel\", \"_to\", \"_the\", \"_jungle\"]\n", + "```\n", + "\n", + "And so `len(map_pieces_to_words(pieces)) == len(input.split())`.\n", + "\n", + "## Setup\n", + "\n", + "Consider a model that receives three input tensors:\n", + "\n", + "- `input_ids`, a `torch.IntTensor` with a shape of `(batch_size, sequence_length)`\n", + "- `attention_mask`, a `torch.IntTensor` with a shape of `(batch_size, sequence_length)`\n", + "- `first_piece_mask`, a `torch.IntTensor` with a shape of `(batch_size, sequence length)`\n", + "\n", + "`input_ids` contains indices of word pieces in the vocabulary. `attention_mask` contains boolean values denoting valid and padded positions. `first_piece_mask` contains boolean values denoting whether a token is the first word piece of that word or not. All tensors are properly padded to the right. For example:\n", + "\n", + "```python\n", + "input_texts = [\"Welcome to the jungle\", \"Hello darkness my old friend\"]\n", + "input_pieces = [wordpiece_tokenize(text) for text in input_texts]\n", + "```\n", + "\n", + "Let's say that the output of this code would be:\n", + "```python\n", + ">>> input_pieces\n", + "[\n", + " [\"_Wel\", \"come\", \"_to\", \"_the\", \"_jungle\"],\n", + " [\"_He\", \"llo\", \"_dark\", \"ness\", \"_my\", \"_old\", \"_fri\", \"end\"]\n", + "]\n", + "```\n", + "\n", + "Creating our inputs:\n", + "```python\n", + ">>> input_ids = pad([pieces_to_ids(pieces) for pieces in input_pieces], pad_value=-1)\n", + ">>> input_ids\n", + "[\n", + " [10, 11, 12, 13, 14, -1, -1, -1], \n", + " [15, 16, 17, 18, 19, 20, 21, 22]\n", + "]\n", + "\n", + ">>> attention_mask = pad([[True]*len(pieces) for pieces in input_pieces], pad_value=0)\n", + ">>> attention_mask\n", + "[\n", + " [1, 1, 1, 1, 1, 0, 0, 0], \n", + " [1, 1, 1, 1, 1, 1, 1, 1]\n", + "]\n", + "\n", + ">>> first_piece_mask = pad([[p.startswith('_') for p in pieces] for pieces in input_pieces], pad_value=0)\n", + ">>> first_piece_mask\n", + "[\n", + " [1, 0, 1, 1, 1, 0, 0, 0], \n", + " [1, 0, 1, 0, 1, 1, 1, 0]\n", + "]\n", + "```\n", + "\n", + "Creating tensors:\n", + "\n", + "```python\n", + ">>> input_ids = torch.as_tensor(input_ids)\n", + ">>> attention_mask = torch.as_tensor(attention_mask)\n", + ">>> first_piece_mask = torch.as_tensor(first_piece_mask)\n", + ">>> input_ids.shape # batch_size = 2, sequence_length = 8\n", + "torch.Size([2, 8])\n", + "```\n", + "\n", + "That is it. Our setup is done. 
In actual code:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8967c50", + "metadata": {}, + "outputs": [], + "source": [ + "input_texts = [\"Welcome to the jungle\", \"Hello darkness my old friend\"]\n", + "input_pieces = [[\"_Wel\", \"come\", \"_to\", \"_the\", \"_jungle\"], \n", + " [\"_He\", \"llo\", \"_dark\", \"ness\", \"_my\", \"_old\", \"_fri\", \"end\"]]\n", + "input_ids = torch.as_tensor([[10, 11, 12, 13, 14, -1, -1, -1], [15, 16, 17, 18, 19, 20, 21, 22]])\n", + "attention_mask = torch.as_tensor([[1, 1, 1, 1, 1, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1]])\n", + "first_piece_mask = torch.as_tensor([[1, 0, 1, 1, 1, 0, 0, 0], [1, 0, 1, 0, 1, 1, 1, 0]])\n", + "batch_size, seq_len = input_ids.shape" + ] + }, + { + "cell_type": "markdown", + "id": "cbc39b91", + "metadata": {}, + "source": [ + "### First word-piece selection\n", + "\n", + "Now, how can we efficiently select the input ids of the **first word piece** for each sequence in the batch?\n", + "\n", + "Note that a simple binary-indexing strategy might gives us trouble for two reasons:\n", + "1. The number of 1s for each sequence in the batch differ.\n", + "2. Even if the number of 1s was equal for all sequences in the batch, binary indexing does not return a tensor with the same shape as the original tensor.\n", + "\n", + "If you try that out, the result would be the following:\n", + "```python\n", + ">>> input_ids[first_piece_mask.bool()]\n", + "tensor([10, 12, 13, 14, 15, 17, 19, 20, 21])\n", + "```\n", + "\n", + "Which is not what we want. To circumvent this behavior, we can resort to positional indexing. That is, we want to select the elements in the following positions:\n", + "```python\n", + "[\n", + " [0, 2, 3, 4], \n", + " [0, 2, 4, 5, 6]\n", + "]\n", + "```\n", + "\n", + "But here we also face the first issue, namely, that the number of \"selected ids\" is different for the two sequences in the batch. A simple fix to this problem is to _pad_ the first tensor with a dummy index value (e.g., `-1`).\n", + "\n", + "Therefore, we are looking for a function that is a vectorized version of `torch.nonzero`. In simple words, we want a function that returns the indices of nonzero input elements as a padded tensor. Here are two ways of achieving this ,one using for loops + `torch.nonzero`, and another using broadcasting. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "be39ba85", + "metadata": {}, + "outputs": [], + "source": [ + "from torch.nn.utils.rnn import pad_sequence\n", + "\n", + "def pad(sequences, pad_value=0):\n", + " return pad_sequence(sequences, batch_first=True, padding_value=pad_value)\n", + "\n", + "def simple_padded_nonzero(mask: torch.LongTensor, pad_value: int = -1) -> torch.LongTensor:\n", + " \"\"\"\n", + " Returns a (right-padded) tensor containing indices of nonzero elements from a binary mask tensor.\n", + " \n", + " Example:\n", + " [[1, 0, 0, 1, 0, 1, 0, 1],\n", + " [1, 1, 0, 0, 0, 1, 0, 0]]\n", + " will be transformed to:\n", + " [[0, 3, 5, 7],\n", + " [0, 1, 5, -1]]\n", + " where -1 indicates pad positions.\n", + " \n", + " Args:\n", + " mask: torch.LongTensor with shape of (batch_size, sequence_length)\n", + " \n", + " Returns:\n", + " torch.LongTensor with shape of (batch_size, original_sequence_length)\n", + " where original_sequence_length = max(sum(mask, dim=-1))\n", + " \"\"\"\n", + " batch_size, seq_len = mask.shape\n", + " non_zero_tensors = [torch.nonzero(mask[i]).flatten() for i in range(batch_size)]\n", + " return pad(non_zero_tensors, pad_value).to(mask.device)\n", + "\n", + "def vectorized_padded_nonzero(mask: torch.LongTensor, pad_value: int = -1) -> torch.LongTensor:\n", + " non_zeros = mask.nonzero()\n", + " non_zero_rows = non_zeros[:, 0]\n", + " non_zero_cols = non_zeros[:, 1]\n", + " count_unique_ids = non_zero_rows.bincount().cumsum(dim=0).cpu()\n", + " non_zero_tensors = non_zero_cols.tensor_split(count_unique_ids[:-1])\n", + " return pad(non_zero_tensors, pad_value).to(mask.device)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "78820f0d", + "metadata": {}, + "outputs": [], + "source": [ + "first_piece_idxs = simple_padded_nonzero(first_piece_mask)\n", + "first_piece_idxs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7482ffc8", + "metadata": {}, + "outputs": [], + "source": [ + "first_piece_idxs = vectorized_padded_nonzero(first_piece_mask)\n", + "first_piece_idxs" + ] + }, + { + "cell_type": "markdown", + "id": "cfebf6a7", + "metadata": {}, + "source": [ + "Great! Both versions return the same output. Later on we will compare everything in terms of running time to see the impact of vectorizing. 
Now that we have the indices of the first pieces, we can simply do an index selection as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8caa514", + "metadata": {}, + "outputs": [], + "source": [ + "ar = torch.arange(batch_size)\n", + "input_ids[ar.unsqueeze(-1), first_piece_idxs]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f5cc1491", + "metadata": {}, + "outputs": [], + "source": [ + "attention_mask[ar.unsqueeze(-1), first_piece_idxs]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7ceb6ca4", + "metadata": {}, + "outputs": [], + "source": [ + "x = torch.randint(0, 2, size=(128, 512))\n", + "%timeit simple_padded_nonzero(x.cpu())\n", + "%timeit vectorized_padded_nonzero(x.cpu())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d7a4c74d", + "metadata": {}, + "outputs": [], + "source": [ + "%timeit simple_padded_nonzero(x.cuda())\n", + "%timeit vectorized_padded_nonzero(x.cuda())" + ] + }, + { + "cell_type": "markdown", + "id": "a209029d", + "metadata": {}, + "source": [ + "### Summing and averaging pieces\n", + "\n", + "Instead of selecting only the first piece of each word, we can think of other aggregation strategies, such as summing or averaging all piece vectors." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "73c4685d", + "metadata": {}, + "outputs": [], + "source": [ + "print(first_piece_mask)\n", + "print(first_piece_idxs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1cf36198", + "metadata": {}, + "outputs": [], + "source": [ + "first_piece_mask_aug = first_piece_mask + (1 - attention_mask)\n", + "last_piece_mask = ((first_piece_mask_aug - first_piece_mask_aug.roll(-1)) <= 0).long() * attention_mask\n", + "last_piece_idxs = vectorized_padded_nonzero(last_piece_mask)\n", + "print(last_piece_mask)\n", + "print(last_piece_idxs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3c933ff0", + "metadata": {}, + "outputs": [], + "source": [ + "cumsummed_pieces = input_ids.cumsum(dim=1)\n", + "cumsummed_pieces" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c9b20add", + "metadata": {}, + "outputs": [], + "source": [ + "a = cumsummed_pieces[ar.unsqueeze(-1), last_piece_idxs]\n", + "shifted_cumsummed_pieces = torch.cat((torch.zeros(batch_size, 1), cumsummed_pieces[:, :-1]), dim=1)\n", + "summed_pieces = a - shifted_cumsummed_pieces[ar.unsqueeze(-1), first_piece_idxs]\n", + "summed_pieces" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "79a7c61f", + "metadata": {}, + "outputs": [], + "source": [ + "lengths = last_piece_idxs - first_piece_idxs + 1\n", + "summed_pieces / lengths" + ] + }, + { + "cell_type": "markdown", + "id": "f4225cac", + "metadata": {}, + "source": [ + "---\n", + "\n", + "# Clustered attention\n", + "\n", + "Self-attention in transformers works with 3 tensors:\n", + "\n", + "- `queries` with a shape of `(batch_size, num_heads, sequence_length, hidden_size)`\n", + "- `keys` with a shape of `(batch_size, num_heads, sequence_length, hidden_size)`\n", + "- `values` with a shape of `(batch_size, num_heads, sequence_length, hidden_size)`\n", + "\n", + "Attention is computed as follows:\n", + "\n", + "```python\n", + "# 1. compute logits in O(n^2 * d)\n", + "logits = queries @ keys.transpose(-1, -2) / math.sqrt(hidden_size)\n", + "# 2. mask out padding positions\n", + "logits = logits.masked_fill(attention_mask == 0, -9999999.)\n", + "# 3. 
map logits to probabilities\n", + "probas = torch.softmax(logits, dim=-1)\n", + "# 4. compute a weighted sum of value vectors\n", + "output = probas @ values\n", + "```\n", + "\n", + "Let's say we want to improve the self-attention performance in transformers by working with clusters. The idea is that instead of computing all $n \\times n$ dot-products, we can map queries, keys, and values to some clusters, and then compute dot-products only inside those clusters. Concretely, if we have **balanced** $c$ clusters, we can reduce the self-attention cost to:\n", + "\n", + "$$\n", + "O \\left(c \\times \\frac{n}{c} \\times \\frac{n}{c} \\times d \\right) = O\\left(\\frac{n^2}{c} \\times d\\right)\n", + "$$\n", + "\n", + "If we set $c = \\sqrt{n}$, which is a reasonable choice for the number of clusters for most applications, we get $O(n\\sqrt{n} \\times d)$, which is better than the quadratic cost $O(n^2 \\times d)$.\n", + "\n", + "\n", + "In practical terms, consider that you are given $c$ clusters as represented by their centroids for each head:\n", + "\n", + "- `centroids` with a shape of `(num_heads, num_centroids, hidden_size)`\n", + "\n", + "How can we compute attention efficiently?" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "7d0e803d", + "metadata": {}, + "outputs": [], + "source": [ + "def clustered_attention_sorted(q, k, v, centroids, mask):\n", + " # get sequence lengths for q and k (might be different for seq2seq problems)\n", + " q_seq_len = q.shape[-2]\n", + " k_seq_len = k.shape[-2]\n", + " batch_size = q.shape[0]\n", + " num_heads = q.shape[1]\n", + " num_centroids = centroids.shape[-2]\n", + " \n", + " # add `batch` dimension\n", + " # (batch_size, num_heads, 1, num_centroids, num_projections)\n", + " expanded_centroids = centroids[None, :, None, :, :].expand(batch_size, -1, 1, -1, -1)\n", + "\n", + " # add `middle` dimension\n", + " # (batch_size, num_heads, 1, q_seq_len, num_projections)\n", + " expanded_q = q.unsqueeze(2)\n", + " # (batch_size, num_heads, 1, k_seq_len, num_projections)\n", + " expanded_k = k.unsqueeze(2)\n", + "\n", + " # q_dists.shape is (batch, num_heads, 1, q_seq_len, num_centroids)\n", + " q_dists = torch.cdist(expanded_q, expanded_centroids, p=2)\n", + " # k_dists.shape is (batch, num_heads, 1, k_seq_len, num_centroids)\n", + " k_dists = torch.cdist(expanded_k, expanded_centroids, p=2)\n", + "\n", + " # q_clustered.shape is (batch, num_heads, 1, q_seq_len)\n", + " q_clustered = torch.argmin(q_dists, dim=-1)\n", + " # k_clustered.shape is (batch, num_heads, 1, k_seq_len)\n", + " k_clustered = torch.argmin(k_dists, dim=-1)\n", + "\n", + " # transpose to get `1` as different hashing rounds\n", + " # q_clustered.shape is (batch, num_heads, q_seq_len, 1)\n", + " q_clustered = q_clustered.transpose(2, 3)\n", + " # k_clustered.shape is (batch, num_heads, k_seq_len, 1)\n", + " k_clustered = k_clustered.transpose(2, 3)\n", + " \n", + " # deal with mask later, but we can also\n", + " # set cluster id for padding positions as `num_centroids` (ids start with 0)\n", + " # q_clustered = q_clustered.masked_fill(~mask.view(batch_size, 1, q_seq_len, 1), num_centroids)\n", + " # k_clustered = k_clustered.masked_fill(~mask.view(batch_size, 1, k_seq_len, 1), num_centroids)\n", + "\n", + " # we need to divide q_clustered into (similarly for k_clustered)\n", + " # (batch, num_heads, num_centroids, max_cluster_size_q_for_all_batch_and_heads, 1)\n", + "\n", + " # q_clustered_bin.shape is (batch, num_heads, q_seq_len, num_centroids)\n", 
q_clustered_bin = q_clustered == torch.arange(num_centroids, device=device)\n", + " # k_clustered_bin.shape is (batch, num_heads, k_seq_len, num_centroids)\n", + " k_clustered_bin = k_clustered == torch.arange(num_centroids, device=device)\n", + "\n", + " # q_clustered_bin.shape is (batch, num_heads, num_centroids, q_seq_len)\n", + " q_clustered_bin = q_clustered_bin.transpose(-1, -2).int()\n", + " # k_clustered_bin.shape is (batch, num_heads, num_centroids, k_seq_len)\n", + " k_clustered_bin = k_clustered_bin.transpose(-1, -2).int()\n", + "\n", + " # get the max cluster size across all batches and heads\n", + " max_cluster_size_q = q_clustered_bin.sum(-1).max().item()\n", + " max_cluster_size_k = k_clustered_bin.sum(-1).max().item()\n", + "\n", + " # utopically, max_cluster_size_q = q_seq_len / num_centroids\n", + " # but in this implementation I'm ignoring this assumption\n", + " # `q_clustered_vals` contains only 0 or 1 ints (due to one hot binarization)\n", + " q_clustered_vals, q_clustered_idxs = q_clustered_bin.sort(dim=-1, descending=True, stable=True)\n", + " k_clustered_vals, k_clustered_idxs = k_clustered_bin.sort(dim=-1, descending=True, stable=True)\n", + " # values that are 0 correspond to padding positions, so we mask them with q_seq_len - 1 (last token)\n", + " q_clustered_idxs[~q_clustered_vals.bool()] = q_seq_len - 1\n", + " k_clustered_idxs[~k_clustered_vals.bool()] = k_seq_len - 1\n", + " # get 0 and 1s as masks\n", + " mask_clustered_q = q_clustered_vals.bool()\n", + " mask_clustered_k = k_clustered_vals.bool()\n", + "\n", + " # deal with padding\n", + " lenghts = mask.sum(-1)[:, None, None, None]\n", + " pad_mask_bucketed_q = q_clustered_idxs < lenghts\n", + " pad_mask_bucketed_k = k_clustered_idxs < lenghts\n", + " \n", + " # combine masks\n", + " full_mask_bucketed_q = mask_clustered_q & pad_mask_bucketed_q\n", + " full_mask_bucketed_k = mask_clustered_k & pad_mask_bucketed_k\n", + "\n", + " # q_bucketed.shape is (batch, num_heads, num_centroids, max_cluster_size_q)\n", + " q_bucketed = q_clustered_idxs[:, :, :, :max_cluster_size_q]\n", + " # k_bucketed.shape is (batch, num_heads, num_centroids, max_cluster_size_k)\n", + " k_bucketed = k_clustered_idxs[:, :, :, :max_cluster_size_k]\n", + " # same shape as above\n", + " mask_bucketed_q = mask_clustered_q[:, :, :, :max_cluster_size_q]\n", + " mask_bucketed_k = mask_clustered_k[:, :, :, :max_cluster_size_k]\n", + " full_mask_bucketed_q = full_mask_bucketed_q[:, :, :, :max_cluster_size_q]\n", + " full_mask_bucketed_k = full_mask_bucketed_k[:, :, :, :max_cluster_size_k]\n", + " # create pairwise mask with shape (batch, num_heads, num_centroids, max_cluster_size_q, max_cluster_size_k)\n", + " mask_bucketed = full_mask_bucketed_q.unsqueeze(-1) & full_mask_bucketed_k.unsqueeze(-2)\n", + "\n", + " # (batch, num_heads, num_clusters * max_cluster_size)\n", + " squished_inds_q = q_bucketed.reshape(batch_size, num_heads, -1)\n", + " squished_inds_k = k_bucketed.reshape(batch_size, num_heads, -1)\n", + "\n", + " # keys and values are bucketed with the same buckets\n", + " # the bucketed tensors are (batch, num_heads, num_clusters * max_cluster_size, head_size)\n", + " bucketed_q = q.gather(2, squished_inds_q.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + " bucketed_k = k.gather(2, squished_inds_k.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + " bucketed_v = v.gather(2, squished_inds_k.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + "\n", + " # we now expand the squished dim into (num_centroids, max_cluster_size)\n", + " 
bucketed_q = bucketed_q.view(batch_size, num_heads, num_centroids, -1, head_size)\n", + " bucketed_k = bucketed_k.view(batch_size, num_heads, num_centroids, -1, head_size)\n", + " bucketed_v = bucketed_v.view(batch_size, num_heads, num_centroids, -1, head_size)\n", + "\n", + " # dots are (batch, num_heads, num_centroids, max_cluster_size_q, max_cluster_size_k)\n", + " sqrt_d = head_size ** 0.5\n", + " dots = bucketed_q @ bucketed_k.transpose(-1, -2) / sqrt_d\n", + "\n", + " # mask the dots past key length; add `max_cluster_size_q` dim for broadcasting\n", + " neg_inf = -9999999.0\n", + " dots = dots.masked_fill(~mask_bucketed, neg_inf) # float('-inf') will generate nans in softmax\n", + "\n", + " # att_dist is (batch, num_heads, num_centroids, max_cluster_size_q, max_cluster_size_k)\n", + " att_dist = torch.softmax(dots, dim=-1)\n", + "\n", + " # fix the uniform numbers for padding positions\n", + " att_dist = att_dist * mask_bucketed.float()\n", + "\n", + " # output is (batch, num_heads, num_centroids, max_cluster_size_q, head_size)\n", + " output = torch.matmul(att_dist, bucketed_v)\n", + "\n", + " # make sure squashed indices for pad positions are higher than last valid token id\n", + " squished_mask_q = mask_bucketed_q.reshape(batch_size, num_heads, -1)\n", + " # squished_mask_k = mask_bucketed_k.reshape(batch_size, num_heads, -1)\n", + " fixed_squished_inds_q = squished_inds_q.masked_fill(~squished_mask_q, q_seq_len + 1)\n", + " # fixed_squished_inds_k = squished_inds_q.masked_fill(~squished_mask_k, k_seq_len + 1)\n", + "\n", + " # get indices of valid contextualized query vectors\n", + " _, rev_inds_q = fixed_squished_inds_q.sort(dim=-1, stable=True)\n", + " # truncate to get only the first q_seq_len vectors -> the valid ones\n", + " rev_inds_q = rev_inds_q[:, :, :q_seq_len]\n", + " # fix order\n", + " rev_inds_q, _ = rev_inds_q.sort(dim=-1)\n", + "\n", + " # squish output and gather correct vectors\n", + " squished_output = output.view(batch_size, num_heads, -1, head_size)\n", + " # output.shape is (batch, num_heads, q_seq_len, head_size)\n", + " output = squished_output.gather(2, rev_inds_q.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + "\n", + " # concat heads back\n", + " output = output.transpose(1, 2).reshape(batch_size, -1, num_heads * head_size)\n", + " \n", + " return output" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "ab9a5a33", + "metadata": {}, + "outputs": [], + "source": [ + "from torch.nn.utils.rnn import pad_sequence\n", + "\n", + "def pad(sequences, pad_value=0):\n", + " return pad_sequence(sequences, batch_first=True, padding_value=pad_value)\n", + "\n", + "def clustered_attention_vectorized(q, k, v, centroids, mask):\n", + " # get sequence lengths for q and k (might be different for seq2seq problems)\n", + " q_seq_len = q.shape[-2]\n", + " k_seq_len = k.shape[-2]\n", + " batch_size = q.shape[0]\n", + " num_heads = q.shape[1]\n", + " num_centroids = centroids.shape[-2]\n", + " \n", + " # add `batch` dimension\n", + " # (batch_size, num_heads, 1, num_centroids, num_projections)\n", + " expanded_centroids = centroids[None, :, None, :, :].expand(batch_size, -1, 1, -1, -1)\n", + "\n", + " # add `middle` dimension\n", + " # (batch_size, num_heads, 1, q_seq_len, num_projections)\n", + " expanded_q = q.unsqueeze(2)\n", + " # (batch_size, num_heads, 1, k_seq_len, num_projections)\n", + " expanded_k = k.unsqueeze(2)\n", + "\n", + " # q_dists.shape is (batch, num_heads, 1, q_seq_len, num_centroids)\n", + " q_dists = torch.cdist(expanded_q, 
expanded_centroids, p=2)\n", + " # k_dists.shape is (batch, num_heads, 1, k_seq_len, num_centroids)\n", + " k_dists = torch.cdist(expanded_k, expanded_centroids, p=2)\n", + "\n", + " # q_clustered.shape is (batch, num_heads, 1, q_seq_len)\n", + " q_clustered = torch.argmin(q_dists, dim=-1)\n", + " # k_clustered.shape is (batch, num_heads, 1, k_seq_len)\n", + " k_clustered = torch.argmin(k_dists, dim=-1)\n", + "\n", + " # transpose to get `1` as different hashing rounds\n", + " # q_clustered.shape is (batch, num_heads, q_seq_len, 1)\n", + " q_clustered = q_clustered.transpose(2, 3)\n", + " # k_clustered.shape is (batch, num_heads, k_seq_len, 1)\n", + " k_clustered = k_clustered.transpose(2, 3)\n", + " \n", + " # we will deal with masking later, but we could\n", + " # set cluster id for padding positions as `num_centroids` (ids start with 0)\n", + " # q_clustered = q_clustered.masked_fill(~mask.view(batch_size, 1, q_seq_len, 1), num_centroids)\n", + " # k_clustered = k_clustered.masked_fill(~mask.view(batch_size, 1, k_seq_len, 1), num_centroids)\n", + "\n", + " # we need to divide q_clustered into (similarly for k_clustered)\n", + " # (batch, num_heads, num_centroids, max_cluster_size_q_for_all_batch_and_heads, 1)\n", + "\n", + " # q_clustered_bin.shape is (batch, num_heads, q_seq_len, num_centroids)\n", + " q_clustered_bin = q_clustered == torch.arange(num_centroids, device=device)\n", + " # k_clustered_bin.shape is (batch, num_heads, k_seq_len, num_centroids)\n", + " k_clustered_bin = k_clustered == torch.arange(num_centroids, device=device)\n", + "\n", + " # q_clustered_bin.shape is (batch, num_heads, num_centroids, q_seq_len)\n", + " q_clustered_bin = q_clustered_bin.transpose(-1, -2).int()\n", + " # k_clustered_bin.shape is (batch, num_heads, num_centroids, k_seq_len)\n", + " k_clustered_bin = k_clustered_bin.transpose(-1, -2).int()\n", + " \n", + " # arange tensors for queries and keys\n", + " q_ar = 1 + torch.arange(q_seq_len, device=device).view(1, 1, 1, -1).expand_as(q_clustered_bin)\n", + " k_ar = 1 + torch.arange(k_seq_len, device=device).view(1, 1, 1, -1).expand_as(k_clustered_bin)\n", + " \n", + " # q_nz.shape is (num_now_zero_entries, 4)\n", + " # where each column contains the nonzero ids for each original dimension,\n", + " # namely: batch, num_heads, num_centroids, q_seq_len\n", + " q_nz = (q_ar * q_clustered_bin).nonzero()\n", + " k_nz = (k_ar * k_clustered_bin).nonzero()\n", + " \n", + " # convert the first three columns into a single column\n", + " q_rows = q_nz[:, 0] * (num_heads * num_centroids) + q_nz[:, 1] * num_centroids + q_nz[:, 2]\n", + " k_rows = k_nz[:, 0] * (num_heads * num_centroids) + k_nz[:, 1] * num_centroids + k_nz[:, 2]\n", + " \n", + " # the last column is the sequence dimension (the one we care about) \n", + " q_cols = q_nz[:, -1]\n", + " k_cols = k_nz[:, -1]\n", + " \n", + " # count the number of unique row ids and cumsum them to create continuous slices \n", + " q_split_slices = q_rows.bincount().cumsum(dim=0)[:-1].cpu().tolist()\n", + " k_split_slices = k_rows.bincount().cumsum(dim=0)[:-1].cpu().tolist()\n", + " \n", + " # pad for missing slices since the last head of the last batch might be empty\n", + " num_total_centroids = batch_size * num_heads * num_centroids\n", + " q_num_missing_centroids = num_total_centroids - len(q_split_slices)\n", + " k_num_missing_centroids = num_total_centroids - len(k_split_slices)\n", + " q_split_slices.extend([q_split_slices[-1]] * (q_num_missing_centroids - 1))\n", + " k_split_slices.extend([k_split_slices[-1]] * 
(k_num_missing_centroids - 1))\n", + " \n", + " # merge the sequence ids in tensors following the slices\n", + " q_splited = q_cols.tensor_split(q_split_slices)\n", + " k_splited = k_cols.tensor_split(k_split_slices)\n", + " \n", + " # pad the smaller tensors with -1 and reshape back to \n", + " # (batch_size, num_heads, num_centroids, max_cluster_size_q_for_all_batch_and_heads)\n", + " q_bucketed_idxs = pad(q_splited, -1).view(batch_size, num_heads, num_centroids, -1)\n", + " k_bucketed_idxs = pad(k_splited, -1).view(batch_size, num_heads, num_centroids, -1)\n", + " \n", + " # get the max cluster size across all batches and heads\n", + " # utopically, max_cluster_size_q = q_seq_len / num_centroids\n", + " # but in this implementation I'm ignoring this assumption\n", + " max_cluster_size_q = q_bucketed_idxs.shape[-1]\n", + " max_cluster_size_k = k_bucketed_idxs.shape[-1]\n", + " mask_bucketed_q = q_bucketed_idxs != -1\n", + " mask_bucketed_k = k_bucketed_idxs != -1\n", + " \n", + " # deal with padding\n", + " lenghts = mask.sum(-1)[:, None, None, None]\n", + " pad_mask_bucketed_q = q_bucketed_idxs < lenghts\n", + " pad_mask_bucketed_k = k_bucketed_idxs < lenghts\n", + "\n", + " # combine masks\n", + " full_mask_bucketed_q = mask_bucketed_q & pad_mask_bucketed_q\n", + " full_mask_bucketed_k = mask_bucketed_k & pad_mask_bucketed_k\n", + " \n", + " # create pairwise mask with shape \n", + " # (batch, num_heads, num_centroids, max_cluster_size_q, max_cluster_size_k)\n", + " # this is where having balanced clusters with num_centroids = sqrt(n)\n", + " # leads to performance improvements\n", + " mask_bucketed = full_mask_bucketed_q.unsqueeze(-1) & full_mask_bucketed_k.unsqueeze(-2)\n", + "\n", + " # (batch, num_heads, num_clusters * max_cluster_size)\n", + " q_bucketed = q_bucketed_idxs.masked_fill(~mask_bucketed_q, q_seq_len - 1)\n", + " k_bucketed = k_bucketed_idxs.masked_fill(~mask_bucketed_k, k_seq_len - 1)\n", + " squished_inds_q = q_bucketed.reshape(batch_size, num_heads, -1)\n", + " squished_inds_k = k_bucketed.reshape(batch_size, num_heads, -1)\n", + "\n", + " # keys and values are bucketed with the same ids\n", + " # the bucketed tensors are (batch, num_heads, num_clusters * max_cluster_size, head_size)\n", + " bucketed_q = q.gather(2, squished_inds_q.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + " bucketed_k = k.gather(2, squished_inds_k.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + " bucketed_v = v.gather(2, squished_inds_k.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + "\n", + " # we now expand the squished dim into (num_centroids, max_cluster_size)\n", + " bucketed_q = bucketed_q.view(batch_size, num_heads, num_centroids, -1, head_size)\n", + " bucketed_k = bucketed_k.view(batch_size, num_heads, num_centroids, -1, head_size)\n", + " bucketed_v = bucketed_v.view(batch_size, num_heads, num_centroids, -1, head_size)\n", + "\n", + " # dots are (batch, num_heads, num_centroids, max_cluster_size_q, max_cluster_size_k)\n", + " sqrt_d = head_size ** 0.5\n", + " dots = bucketed_q @ bucketed_k.transpose(-1, -2) / sqrt_d\n", + " # mask the dots past key length; add `max_cluster_size_q` dim for broadcasting\n", + " # float('-inf') will generate nans in softmax, so we use a very small value\n", + " # instead. 
This happens because some clusters might be empty\n", + " neg_inf = -9999999.0\n", + " dots = dots.masked_fill(~mask_bucketed, neg_inf)\n", + "\n", + " # att_dist is (batch, num_heads, num_centroids, max_cluster_size_q, max_cluster_size_k)\n", + " att_dist = torch.softmax(dots, dim=-1)\n", + "\n", + " # fix the uniform numbers for padding positions\n", + " att_dist = att_dist * mask_bucketed.float()\n", + "\n", + " # output is (batch, num_heads, num_centroids, max_cluster_size_q, head_size)\n", + " att_output = torch.matmul(att_dist, bucketed_v)\n", + "\n", + " # squish output and mask\n", + " squished_output = att_output.view(batch_size, num_heads, -1, head_size)\n", + " squished_mask_q = mask_bucketed_q.view(batch_size, num_heads, -1)\n", + " \n", + " # get indices of valid contextualized query vectors\n", + " ar = torch.arange(num_centroids * max_cluster_size_q, device=device)\n", + " ar = ar.view(1, 1, -1).expand(batch_size, num_heads, -1)\n", + " squished_idxs_q = ar[squished_mask_q].view(batch_size, num_heads, -1)\n", + " \n", + " # output.shape is (batch, num_heads, q_seq_len, head_size)\n", + " output = squished_output.gather(2, squished_idxs_q.unsqueeze(-1).expand(-1, -1, -1, head_size))\n", + " \n", + " # concat heads back\n", + " # output.shape is (batch, q_seq_len, hidden_size)\n", + " output = output.transpose(1, 2).reshape(batch_size, -1, num_heads * head_size)\n", + " \n", + " return output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "349ff753", + "metadata": {}, + "outputs": [], + "source": [ + "batch_size = 2\n", + "sequence_length = 2048\n", + "num_heads = 4\n", + "head_size = 4\n", + "hidden_size = num_heads * head_size\n", + "num_centroids = 3\n", + "device = 'cuda' if torch.cuda.is_available() else 'cpu'\n", + "\n", + "centroids = torch.randn(num_heads, num_centroids, hidden_size).to(device)\n", + "q = torch.randn(batch_size, num_heads, sequence_length, hidden_size).to(device)\n", + "k = torch.randn(batch_size, num_heads, sequence_length, hidden_size).to(device)\n", + "v = torch.randn(batch_size, num_heads, sequence_length, hidden_size).to(device)\n", + "mask = torch.ones(batch_size, sequence_length).bool().to(device)\n", + "# mask = torch.tensor([5, 8]).unsqueeze(-1) >= torch.arange(sequence_length).unsqueeze(0).expand(batch_size, -1)\n", + "# mask = mask.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7f175a1", + "metadata": {}, + "outputs": [], + "source": [ + "with torch.no_grad():\n", + " %timeit clustered_attention_sorted(q, k, v, centroids, mask).shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "998c2e1c", + "metadata": {}, + "outputs": [], + "source": [ + "with torch.no_grad():\n", + " %timeit clustered_attention_vectorized(q, k, v, centroids, mask).shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c7d3c4aa", + "metadata": {}, + "outputs": [], + "source": [ + "del q, k, v, mask, centroids\n", + "torch.cuda.empty_cache()" + ] + }, + { + "cell_type": "markdown", + "id": "741e7bce", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## Timing" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "28dae7e6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[---------- clustered attention ----------]\n", + " | sorted | vectorized\n", + "1 threads: --------------------------------\n", + " [2, 64] | 1.0 | 1.4 \n", + " [2, 128] | 1.1 | 1.4 \n", + " [2, 512] | 1.1 | 1.6 \n", + " [2, 
1024] | 1.1 | 1.7 \n", + " [2, 2048] | 1.1 | 1.8 \n", + " [2, 4096] | 2.6 | 2.8 \n", + " [2, 8192] | 10.6 | 10.3 \n", + " [2, 16384] | 33.2 | 31.5 \n", + " [3, 64] | 1.0 | 1.4 \n", + " [3, 128] | 1.1 | 1.4 \n", + " [3, 512] | 1.1 | 1.5 \n", + " [3, 1024] | 1.2 | 1.8 \n", + " [3, 2048] | 1.8 | 2.3 \n", + " [3, 4096] | 2.3 | 2.5 \n", + " [3, 8192] | 8.0 | 7.4 \n", + " [3, 16384] | 63.3 | 55.8 \n", + " [5, 64] | 1.0 | 1.4 \n", + " [5, 128] | 1.1 | 1.4 \n", + " [5, 512] | 1.2 | 1.5 \n", + " [5, 1024] | 1.2 | 1.5 \n", + " [5, 2048] | 2.0 | 2.4 \n", + " [5, 4096] | 2.3 | 2.4 \n", + " [5, 8192] | 5.5 | 4.8 \n", + " [5, 16384] | 13.4 | 10.8 \n", + " [9, 64] | 1.0 | 1.5 \n", + " [9, 128] | 1.1 | 1.5 \n", + " [9, 512] | 1.2 | 1.5 \n", + " [9, 1024] | 1.2 | 1.6 \n", + " [9, 2048] | 1.9 | 2.2 \n", + " [9, 4096] | 3.4 | 3.0 \n", + " [9, 8192] | 8.8 | 6.3 \n", + " [9, 16384] | 29.1 | 18.8 \n", + " [11, 64] | 1.0 | 1.5 \n", + " [11, 128] | 1.1 | 1.5 \n", + " [11, 512] | 1.2 | 1.5 \n", + " [11, 1024] | 1.2 | 1.5 \n", + " [11, 2048] | 1.4 | 1.8 \n", + " [11, 4096] | 2.7 | 2.5 \n", + " [11, 8192] | 11.2 | 7.2 \n", + " [11, 16384] | 44.3 | 25.7 \n", + "\n", + "Times are in milliseconds (ms).\n", + "\n" + ] + } + ], + "source": [ + "import torch.utils.benchmark as benchmark\n", + "from itertools import product\n", + "\n", + "batch_size = 1\n", + "num_heads = 1\n", + "head_size = 1\n", + "hidden_size = num_heads * head_size\n", + "sequence_lengths = [64, 128, 512, 1024, 2048, 4096, 8192, 8192*2]\n", + "num_centroids = [2, 3, 5, 9, 11]\n", + "device = 'cuda' if torch.cuda.is_available() else 'cpu'\n", + "num_threads = 1\n", + "results = []\n", + "\n", + "for num_c, seq_len in product(num_centroids, sequence_lengths):\n", + " label = 'clustered attention'\n", + " sub_label = f'[{num_c}, {seq_len}]'\n", + " q = torch.randn(batch_size, num_heads, seq_len, hidden_size).to(device)\n", + " k = torch.randn(batch_size, num_heads, seq_len, hidden_size).to(device)\n", + " v = torch.randn(batch_size, num_heads, seq_len, hidden_size).to(device)\n", + " centroids = torch.randn(num_heads, num_c, hidden_size).to(device)\n", + " mask = torch.ones(batch_size, seq_len).bool().to(device)\n", + " results.append(benchmark.Timer(\n", + " stmt='clustered_attention_sorted(q, k, v, centroids, mask)',\n", + " setup='from __main__ import clustered_attention_sorted',\n", + " globals={'q': q, 'k': k, 'v': v, 'centroids': centroids, 'mask': mask},\n", + " num_threads=num_threads,\n", + " label=label,\n", + " sub_label=sub_label,\n", + " description='sorted',\n", + " ).blocked_autorange(min_run_time=1))\n", + " results.append(benchmark.Timer(\n", + " stmt='clustered_attention_vectorized(q, k, v, centroids, mask)',\n", + " setup='from __main__ import clustered_attention_vectorized',\n", + " globals={'q': q, 'k': k, 'v': v, 'centroids': centroids, 'mask': mask},\n", + " num_threads=num_threads,\n", + " label=label,\n", + " sub_label=sub_label,\n", + " description='vectorized',\n", + " ).blocked_autorange(min_run_time=1))\n", + "\n", + "compare = benchmark.Compare(results)\n", + "compare.print()" + ] + }, + { + "cell_type": "markdown", + "id": "0d459a84", + "metadata": {}, + "source": [ + "--- \n", + "## Profiling" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "ce131f7b", + "metadata": {}, + "outputs": [], + "source": [ + "import torch.autograd.profiler as profiler\n", + "\n", + "batch_size = 2\n", + "sequence_length = 1024\n", + "num_heads = 4\n", + "head_size = 4\n", + "hidden_size = num_heads * head_size\n", + 
"num_centroids = 3\n", + "device = 'cuda' if torch.cuda.is_available() else 'cpu'\n", + "centroids = torch.randn(num_heads, num_centroids, hidden_size).to(device)\n", + "q = torch.randn(batch_size, num_heads, sequence_length, hidden_size).to(device)\n", + "k = torch.randn(batch_size, num_heads, sequence_length, hidden_size).to(device)\n", + "v = torch.randn(batch_size, num_heads, sequence_length, hidden_size).to(device)\n", + "mask = torch.ones(batch_size, sequence_length).bool().to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "142bb880", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ \n", + " Name Self CPU % Self CPU CPU total % CPU total CPU time avg Self CUDA Self CUDA % CUDA total CUDA time avg CPU Mem Self CPU Mem CUDA Mem Self CUDA Mem # of Calls Total MFLOPs \n", + "------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ \n", + " aten::sort 2.32% 655.000us 4.78% 1.349ms 337.250us 1.826ms 19.32% 2.006ms 501.500us 0 b 0 b 957.00 Kb -776.00 Kb 4 -- \n", + " aten::bmm 0.33% 94.000us 0.45% 126.000us 31.500us 850.000us 8.99% 850.000us 212.500us 0 b 0 b 42.15 Mb 42.15 Mb 4 176.729 \n", + " aten::copy_ 1.04% 295.000us 1.80% 509.000us 19.577us 724.000us 7.66% 724.000us 27.846us 0 b 0 b 0 b 0 b 26 -- \n", + " aten::mul 0.39% 110.000us 0.72% 204.000us 40.800us 563.000us 5.96% 620.000us 124.000us 0 b 0 b 43.71 Mb 42.71 Mb 5 11.459 \n", + " aten::as_strided 0.93% 263.000us 1.54% 436.000us 5.190us 327.000us 3.46% 327.000us 3.893us 0 b 0 b 0 b 0 b 84 -- \n", + " aten::slice 0.90% 255.000us 1.86% 525.000us 16.935us 316.000us 3.34% 435.000us 14.032us 0 b 0 b 0 b 0 b 31 -- \n", + " aten::sum 0.76% 216.000us 1.67% 471.000us 67.286us 316.000us 3.34% 507.000us 72.429us 0 b 0 b 66.50 Kb -333.50 Kb 7 -- \n", + " aten::_euclidean_dist 0.86% 244.000us 6.12% 1.727ms 863.500us 276.000us 2.92% 1.725ms 862.500us 0 b 0 b 192.00 Kb -3.26 Mb 2 -- \n", + " aten::masked_fill_ 0.16% 46.000us 0.26% 74.000us 18.500us 258.000us 2.73% 258.000us 64.500us 0 b 0 b 0 b 0 b 4 -- \n", + " aten::bitwise_and 0.21% 59.000us 0.29% 82.000us 27.333us 257.000us 2.72% 257.000us 85.667us 0 b 0 b 10.48 Mb 10.48 Mb 3 -- \n", + " aten::_softmax 0.07% 21.000us 0.11% 30.000us 30.000us 223.000us 2.36% 223.000us 223.000us 0 b 0 b 41.71 Mb 41.71 Mb 1 -- \n", + " aten::empty 0.95% 267.000us 1.08% 305.000us 14.524us 196.000us 2.07% 196.000us 9.333us 0 b 0 b 42.29 Mb 42.29 Mb 21 -- \n", + " aten::expand 0.53% 149.000us 1.06% 300.000us 17.647us 189.000us 2.00% 253.000us 14.882us 0 b 0 b 0 b 0 b 17 -- \n", + " aten::div 0.08% 23.000us 0.11% 30.000us 30.000us 184.000us 1.95% 184.000us 184.000us 0 b 0 b 41.71 Mb 41.71 Mb 1 -- \n", + " aten::empty_strided 0.95% 269.000us 1.10% 312.000us 13.000us 182.000us 1.93% 182.000us 7.583us 0 b 0 b 44.07 Mb 44.07 Mb 24 -- \n", + " aten::pow 0.41% 115.000us 0.71% 200.000us 50.000us 164.000us 1.73% 199.000us 49.750us 0 b 0 b 1.00 Mb 1.00 Mb 4 -- \n", + " aten::_to_copy 0.47% 134.000us 1.80% 509.000us 50.900us 155.000us 1.64% 604.000us 60.400us 0 b 0 b 42.39 Mb 0 b 10 -- \n", + " aten::gather 
0.44% 124.000us 0.73% 205.000us 51.250us 155.000us 1.64% 183.000us 45.750us 0 b 0 b 888.00 Kb 888.00 Kb 4 -- \n", + " aten::matmul 0.51% 145.000us 2.32% 656.000us 164.000us 153.000us 1.62% 1.242ms 310.500us 0 b 0 b 42.15 Mb 0 b 4 -- \n", + " aten::_cat 0.28% 80.000us 0.78% 220.000us 55.000us 148.000us 1.57% 234.000us 58.500us 0 b 0 b 1.13 Mb 0 b 4 -- \n", + " aten::cdist 0.35% 99.000us 7.80% 2.202ms 1.101ms 135.000us 1.43% 2.200ms 1.100ms 0 b 0 b 192.00 Kb -3.00 Kb 2 -- \n", + " aten::unsqueeze 0.48% 136.000us 60.82% 17.174ms 1.321ms 134.000us 1.42% 186.000us 14.308us 0 b 0 b 0 b 0 b 13 -- \n", + " aten::to 0.29% 83.000us 2.27% 641.000us 45.786us 108.000us 1.14% 712.000us 50.857us 0 b 0 b 42.39 Mb 0 b 14 -- \n", + " aten::empty_like 0.41% 116.000us 1.44% 407.000us 29.071us 106.000us 1.12% 229.000us 16.357us 0 b 0 b 42.42 Mb 0 b 14 -- \n", + " aten::reshape 0.39% 111.000us 1.93% 546.000us 45.500us 104.000us 1.10% 248.000us 20.667us 0 b 0 b 397.50 Kb 0 b 12 -- \n", + " aten::arange 0.29% 83.000us 1.04% 294.000us 49.000us 101.000us 1.07% 227.000us 37.833us 0 b 0 b 18.00 Kb 0 b 6 -- \n", + " aten::transpose 0.24% 67.000us 0.50% 142.000us 17.750us 91.000us 0.96% 124.000us 15.500us 0 b 0 b 0 b 0 b 8 -- \n", + " aten::argmin 0.21% 58.000us 0.32% 91.000us 45.500us 89.000us 0.94% 98.000us 49.000us 0 b 0 b 128.00 Kb 128.00 Kb 2 -- \n", + " aten::clone 0.39% 110.000us 2.17% 613.000us 76.625us 87.000us 0.92% 508.000us 63.500us 0 b 0 b 42.23 Mb 0 b 8 -- \n", + " aten::max 0.18% 52.000us 0.39% 111.000us 55.500us 80.000us 0.85% 115.000us 57.500us 0 b 0 b 1.00 Kb 0 b 2 -- \n", + " aten::view 0.29% 83.000us 0.43% 122.000us 6.421us 79.000us 0.84% 79.000us 4.158us 0 b 0 b 0 b 0 b 19 -- \n", + " aten::resize_ 0.27% 75.000us 0.31% 87.000us 12.429us 79.000us 0.84% 79.000us 11.286us 0 b 0 b 1.14 Mb 1.14 Mb 7 -- \n", + " aten::bitwise_not 0.27% 77.000us 0.36% 102.000us 25.500us 78.000us 0.83% 78.000us 19.500us 0 b 0 b 10.49 Mb 10.49 Mb 4 -- \n", + " aten::ones_like 0.18% 50.000us 0.86% 244.000us 61.000us 72.000us 0.76% 238.000us 59.500us 0 b 0 b 65.00 Kb 0 b 4 -- \n", + " aten::_local_scalar_dense 0.09% 25.000us 0.27% 77.000us 19.250us 70.000us 0.74% 70.000us 17.500us 0 b 0 b 0 b 0 b 4 -- \n", + " aten::fill_ 0.13% 37.000us 0.23% 65.000us 16.250us 65.000us 0.69% 65.000us 16.250us 0 b 0 b 0 b 0 b 4 -- \n", + " aten::eq 0.16% 45.000us 0.21% 60.000us 30.000us 62.000us 0.66% 62.000us 31.000us 0 b 0 b 48.00 Kb 48.00 Kb 2 -- \n", + " aten::lt 0.15% 41.000us 0.20% 57.000us 28.500us 57.000us 0.60% 57.000us 28.500us 0 b 0 b 48.00 Kb 48.00 Kb 2 -- \n", + " aten::cat 0.12% 34.000us 0.96% 271.000us 67.750us 41.000us 0.43% 275.000us 68.750us 0 b 0 b 1.13 Mb 0 b 4 -- \n", + " aten::clamp_min 0.08% 24.000us 0.14% 39.000us 19.500us 39.000us 0.41% 39.000us 19.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::item 0.10% 28.000us 0.44% 123.000us 30.750us 39.000us 0.41% 109.000us 27.250us 0 b 0 b 0 b 0 b 4 -- \n", + " aten::_unsafe_view 0.20% 57.000us 0.46% 131.000us 16.375us 35.000us 0.37% 54.000us 6.750us 0 b 0 b 0 b 0 b 8 -- \n", + " aten::sqrt_ 0.07% 19.000us 0.12% 34.000us 17.000us 33.000us 0.35% 33.000us 16.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::_reshape_alias 0.11% 30.000us 0.16% 45.000us 5.625us 32.000us 0.34% 32.000us 4.000us 0 b 0 b 0 b 0 b 8 -- \n", + " aten::__and__ 0.08% 22.000us 0.42% 118.000us 39.333us 30.000us 0.32% 287.000us 95.667us 0 b 0 b 10.48 Mb 0 b 3 -- \n", + " aten::contiguous 0.06% 16.000us 0.82% 231.000us 115.500us 25.000us 0.26% 230.000us 115.000us 0 b 0 b 3.00 Kb 0 b 2 -- \n", + " aten::_index_put_impl_ 0.11% 
30.000us 0.40% 112.000us 56.000us 25.000us 0.26% 69.000us 34.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::mT 0.06% 16.000us 0.20% 57.000us 28.500us 22.000us 0.23% 54.000us 27.000us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::result_type 0.04% 10.000us 0.07% 19.000us 4.750us 18.000us 0.19% 18.000us 4.500us 0 b 0 b 0 b 0 b 4 -- \n", + " aten::clamp_min_ 0.05% 15.000us 0.22% 62.000us 31.000us 18.000us 0.19% 57.000us 28.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::index_put_ 0.07% 20.000us 0.50% 141.000us 70.500us 18.000us 0.19% 87.000us 43.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::masked_fill 0.10% 28.000us 0.74% 210.000us 105.000us 13.000us 0.14% 466.000us 233.000us 0 b 0 b 41.84 Mb 0 b 2 -- \n", + " aten::softmax 0.04% 10.000us 0.16% 44.000us 44.000us 6.000us 0.06% 229.000us 229.000us 0 b 0 b 41.71 Mb 0 b 1 -- \n", + " cudaEventDestroy 7.06% 1.995ms 7.06% 1.995ms 1.178us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 1694 -- \n", + " cudaEventCreate 0.38% 107.000us 0.38% 107.000us 0.123us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 870 -- \n", + " cudaEventRecord 65.60% 18.525ms 65.60% 18.525ms 21.293us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 870 -- \n", + " cudaLaunchKernel 2.40% 678.000us 2.40% 678.000us 4.291us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 158 -- \n", + " [memory] 0.00% 0.000us 0.00% 0.000us 0.000us 0.000us 0.00% 0.000us 0.000us 0 b 0 b -274.50 Mb -274.50 Mb 58 -- \n", + " cudaMemcpyAsync 0.24% 67.000us 0.24% 67.000us 16.750us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 4 -- \n", + " cudaStreamSynchronize 0.02% 6.000us 0.02% 6.000us 3.000us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 2 -- \n", + " cudaDeviceGetAttribute 0.00% 1.000us 0.00% 1.000us 0.083us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 12 -- \n", + "cudaOccupancyMaxActiveBlocksPerMultiprocessorWithFla... 
0.04% 12.000us 0.04% 12.000us 0.167us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 72 -- \n", + " cudaPeekAtLastError 0.00% 0.000us 0.00% 0.000us 0.000us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 138 -- \n", + " cudaDeviceSynchronize 5.58% 1.576ms 5.58% 1.576ms 1.576ms 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 1 -- \n", + "------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ \n", + "Self CPU time total: 28.238ms\n", + "Self CUDA time total: 9.453ms\n", + "\n" + ] + } + ], + "source": [ + "with profiler.profile(profile_memory=True, use_cuda=True, with_flops=True) as prof:\n", + " out = clustered_attention_sorted(q, k, v, centroids, mask)\n", + "\n", + "print(prof.key_averages().table(sort_by=\"self_cuda_time_total\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "33adb9f6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ \n", + " Name Self CPU % Self CPU CPU total % CPU total CPU time avg Self CUDA Self CUDA % CUDA total CUDA time avg CPU Mem Self CPU Mem CUDA Mem Self CUDA Mem # of Calls Total KFLOPs \n", + "------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ \n", + " aten::copy_ 1.66% 562.000us 5.81% 1.972ms 30.812us 1.478ms 9.66% 1.478ms 23.094us 0 b 0 b 0 b 0 b 64 -- \n", + " aten::bincount 0.38% 128.000us 2.46% 836.000us 418.000us 1.342ms 8.77% 1.950ms 975.000us 0 b -32 b 1.00 Kb -2.00 Kb 2 -- \n", + " aten::slice 2.64% 896.000us 5.39% 1.829ms 16.627us 1.284ms 8.39% 1.722ms 15.655us 0 b 0 b 0 b 0 b 110 -- \n", + " aten::pad_sequence 2.05% 696.000us 11.63% 3.950ms 1.975ms 1.079ms 7.05% 3.948ms 1.974ms 0 b 0 b 253.50 Kb 0 b 2 -- \n", + " aten::bmm 0.27% 92.000us 0.38% 128.000us 32.000us 1.039ms 6.79% 1.039ms 259.750us 0 b 0 b 42.15 Mb 42.15 Mb 4 176729.088 \n", + " aten::as_strided 1.99% 676.000us 3.30% 1.119ms 4.886us 933.000us 6.10% 933.000us 4.074us 0 b 0 b 0 b 0 b 229 -- \n", + " aten::select 1.52% 516.000us 2.97% 1.009ms 17.102us 723.000us 4.72% 970.000us 16.441us 0 b 0 b 0 b 0 b 59 -- \n", + " aten::mul 0.67% 227.000us 1.08% 366.000us 33.273us 566.000us 3.70% 622.000us 56.545us 0 b 0 b 44.34 Mb 43.34 Mb 11 11541.184 \n", + " aten::narrow 0.98% 334.000us 3.86% 1.312ms 27.333us 486.000us 3.18% 1.261ms 26.271us 0 b 0 b 0 b 0 b 48 -- \n", + " aten::nonzero 0.76% 258.000us 6.63% 2.252ms 750.667us 419.000us 2.74% 568.000us 189.333us 0 b 0 b 704.00 Kb 0 b 3 -- \n", + " aten::empty 0.99% 336.000us 1.14% 386.000us 13.786us 329.000us 2.15% 329.000us 11.750us 0 b 0 b 42.47 Mb 42.47 Mb 28 -- \n", + " aten::tensor_split 0.61% 206.000us 3.19% 1.082ms 541.000us 326.000us 2.13% 1.079ms 539.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::_euclidean_dist 0.71% 242.000us 5.09% 1.730ms 865.000us 266.000us 1.74% 1.729ms 864.500us 0 b 0 b 192.00 Kb -3.26 Mb 2 -- \n", + " aten::bitwise_and 0.17% 58.000us 0.24% 81.000us 27.000us 263.000us 1.72% 263.000us 87.667us 0 b 0 b 10.46 Mb 10.46 Mb 3 
-- \n", + " aten::masked_fill_ 0.10% 33.000us 0.16% 54.000us 18.000us 227.000us 1.48% 227.000us 75.667us 0 b 0 b 0 b 0 b 3 -- \n", + " aten::_softmax 0.06% 21.000us 0.09% 29.000us 29.000us 219.000us 1.43% 219.000us 219.000us 0 b 0 b 41.71 Mb 41.71 Mb 1 -- \n", + " aten::expand 0.53% 179.000us 1.02% 348.000us 17.400us 215.000us 1.40% 297.000us 14.850us 0 b 0 b 0 b 0 b 20 -- \n", + " aten::sum 0.37% 127.000us 0.69% 235.000us 47.000us 196.000us 1.28% 263.000us 52.600us 0 b 0 b 65.50 Kb 49.50 Kb 5 -- \n", + " aten::gather 0.36% 121.000us 0.59% 201.000us 50.250us 187.000us 1.22% 220.000us 55.000us 0 b 0 b 888.00 Kb 888.00 Kb 4 -- \n", + " aten::div 0.07% 23.000us 0.09% 30.000us 30.000us 186.000us 1.22% 186.000us 186.000us 0 b 0 b 41.71 Mb 41.71 Mb 1 -- \n", + " aten::arange 0.39% 133.000us 1.42% 481.000us 48.100us 179.000us 1.17% 405.000us 40.500us 0 b 0 b 66.00 Kb 0 b 10 -- \n", + " aten::pow 0.35% 119.000us 0.58% 198.000us 49.500us 167.000us 1.09% 200.000us 50.000us 0 b 0 b 1.00 Mb 1.00 Mb 4 -- \n", + " aten::matmul 0.44% 151.000us 1.95% 661.000us 165.250us 159.000us 1.04% 1.431ms 357.750us 0 b 0 b 42.15 Mb 0 b 4 -- \n", + " aten::add 0.34% 117.000us 0.48% 164.000us 27.333us 159.000us 1.04% 159.000us 26.500us 0 b 0 b 640.00 Kb 640.00 Kb 6 81.920 \n", + " aten::_to_copy 0.39% 134.000us 4.19% 1.423ms 142.300us 154.000us 1.01% 685.000us 68.500us 400 b 0 b 41.92 Mb 0 b 10 -- \n", + " aten::unsqueeze 0.41% 139.000us 48.67% 16.526ms 1.271ms 151.000us 0.99% 201.000us 15.462us 0 b 0 b 0 b 0 b 13 -- \n", + " aten::reshape 0.32% 109.000us 0.93% 316.000us 22.571us 150.000us 0.98% 283.000us 20.214us 0 b 0 b 128.00 Kb 0 b 14 -- \n", + " aten::_cat 0.25% 86.000us 0.64% 216.000us 54.000us 149.000us 0.97% 233.000us 58.250us 0 b 0 b 1.13 Mb 0 b 4 -- \n", + " aten::cdist 0.28% 95.000us 6.50% 2.206ms 1.103ms 139.000us 0.91% 2.203ms 1.101ms 0 b 0 b 192.00 Kb -3.00 Kb 2 -- \n", + " aten::resize_ 0.37% 126.000us 0.45% 152.000us 12.667us 136.000us 0.89% 136.000us 11.333us 0 b 0 b 1.85 Mb 1.85 Mb 12 -- \n", + " aten::fill_ 0.22% 74.000us 0.38% 130.000us 16.250us 131.000us 0.86% 131.000us 16.375us 0 b 0 b 0 b 0 b 8 -- \n", + " aten::transpose 0.28% 96.000us 0.57% 194.000us 17.636us 122.000us 0.80% 172.000us 15.636us 0 b 0 b 0 b 0 b 11 -- \n", + " aten::view 0.30% 103.000us 0.43% 146.000us 6.636us 110.000us 0.72% 110.000us 5.000us 0 b 0 b 0 b 0 b 22 -- \n", + " aten::to 0.28% 94.000us 4.61% 1.564ms 97.750us 105.000us 0.69% 790.000us 49.375us 400 b 0 b 41.92 Mb 0 b 16 -- \n", + " aten::empty_like 0.24% 82.000us 0.84% 286.000us 28.600us 99.000us 0.65% 220.000us 22.000us 0 b 0 b 42.15 Mb 0 b 10 -- \n", + " aten::min 0.14% 49.000us 0.32% 109.000us 54.500us 94.000us 0.61% 129.000us 64.500us 0 b 0 b 1.00 Kb 0 b 2 -- \n", + " aten::index 0.19% 66.000us 6.01% 2.042ms 2.042ms 94.000us 0.61% 359.000us 359.000us 0 b 0 b 64.00 Kb -192.00 Kb 1 -- \n", + " aten::max 0.14% 48.000us 0.39% 131.000us 65.500us 93.000us 0.61% 149.000us 74.500us 0 b 0 b 1.00 Kb 0 b 2 -- \n", + " aten::argmin 0.17% 59.000us 0.26% 88.000us 44.000us 90.000us 0.59% 98.000us 49.000us 0 b 0 b 128.00 Kb 128.00 Kb 2 -- \n", + " aten::clone 0.25% 86.000us 1.34% 454.000us 75.667us 87.000us 0.57% 523.000us 87.167us 0 b 0 b 42.09 Mb 0 b 6 -- \n", + " aten::empty_strided 0.27% 93.000us 0.34% 114.000us 11.400us 85.000us 0.56% 85.000us 8.500us 400 b 400 b 41.92 Mb 41.92 Mb 10 -- \n", + " aten::bitwise_not 0.16% 53.000us 0.22% 74.000us 24.667us 82.000us 0.54% 82.000us 27.333us 0 b 0 b 10.46 Mb 10.46 Mb 3 -- \n", + " aten::ones_like 0.14% 46.000us 0.71% 240.000us 60.000us 
77.000us 0.50% 238.000us 59.500us 0 b 0 b 65.00 Kb 0 b 4 -- \n", + " aten::ne 0.17% 58.000us 0.21% 73.000us 36.500us 72.000us 0.47% 72.000us 36.000us 0 b 0 b 32.00 Kb 32.00 Kb 2 -- \n", + " aten::eq 0.13% 44.000us 0.17% 59.000us 29.500us 62.000us 0.41% 62.000us 31.000us 0 b 0 b 48.00 Kb 48.00 Kb 2 -- \n", + " aten::lt 0.12% 40.000us 0.16% 56.000us 28.000us 58.000us 0.38% 58.000us 29.000us 0 b 0 b 32.00 Kb 32.00 Kb 2 -- \n", + " aten::_reshape_alias 0.13% 45.000us 0.21% 72.000us 5.538us 56.000us 0.37% 56.000us 4.308us 0 b 0 b 0 b 0 b 13 -- \n", + " aten::set_ 0.11% 39.000us 0.23% 77.000us 12.833us 54.000us 0.35% 74.000us 12.333us 0 b 0 b 0 b 0 b 6 -- \n", + " aten::cat 0.10% 34.000us 0.79% 267.000us 66.750us 39.000us 0.25% 272.000us 68.000us 0 b 0 b 1.13 Mb 0 b 4 -- \n", + " aten::full 0.08% 26.000us 0.30% 102.000us 51.000us 38.000us 0.25% 101.000us 50.500us 0 b 0 b 253.50 Kb 0 b 2 -- \n", + " aten::sqrt_ 0.06% 22.000us 0.10% 34.000us 17.000us 35.000us 0.23% 35.000us 17.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::clamp_min 0.06% 22.000us 0.11% 38.000us 19.000us 34.000us 0.22% 34.000us 17.000us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::__and__ 0.07% 23.000us 0.34% 116.000us 38.667us 34.000us 0.22% 297.000us 99.000us 0 b 0 b 10.46 Mb 0 b 3 -- \n", + " aten::masked_fill 0.13% 44.000us 0.89% 301.000us 100.333us 30.000us 0.20% 513.000us 171.000us 0 b 0 b 41.96 Mb 0 b 3 -- \n", + " aten::_unsafe_view 0.12% 40.000us 0.25% 84.000us 16.800us 28.000us 0.18% 41.000us 8.200us 0 b 0 b 0 b 0 b 5 -- \n", + " aten::cumsum 0.30% 103.000us 0.42% 141.000us 70.500us 28.000us 0.18% 32.000us 16.000us 0 b 0 b 1.00 Kb 1.00 Kb 2 -- \n", + " aten::zero_ 0.05% 18.000us 0.18% 60.000us 30.000us 25.000us 0.16% 60.000us 30.000us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::expand_as 0.04% 14.000us 0.17% 58.000us 29.000us 24.000us 0.16% 57.000us 28.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::mT 0.04% 15.000us 0.17% 57.000us 28.500us 23.000us 0.15% 55.000us 27.500us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::contiguous 0.05% 17.000us 0.68% 230.000us 115.000us 22.000us 0.14% 228.000us 114.000us 0 b 0 b 3.00 Kb 0 b 2 -- \n", + " aten::t 0.07% 23.000us 0.26% 88.000us 29.333us 19.000us 0.12% 59.000us 19.667us 0 b 0 b 0 b 0 b 3 -- \n", + " aten::result_type 0.04% 12.000us 0.06% 20.000us 5.000us 17.000us 0.11% 17.000us 4.250us 0 b 0 b 0 b 0 b 4 -- \n", + " aten::clamp_min_ 0.05% 17.000us 0.19% 63.000us 31.500us 14.000us 0.09% 48.000us 24.000us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::resolve_conj 0.02% 6.000us 0.03% 10.000us 5.000us 8.000us 0.05% 8.000us 4.000us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::resolve_neg 0.01% 5.000us 0.03% 9.000us 4.500us 8.000us 0.05% 8.000us 4.000us 0 b 0 b 0 b 0 b 2 -- \n", + " aten::softmax 0.03% 10.000us 0.13% 43.000us 43.000us 4.000us 0.03% 223.000us 223.000us 0 b 0 b 41.71 Mb 0 b 1 -- \n", + " cudaEventDestroy 6.54% 2.220ms 6.54% 2.220ms 1.311us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 1694 -- \n", + " cudaEventCreate 0.50% 169.000us 0.50% 169.000us 0.100us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 1694 -- \n", + " cudaEventRecord 57.08% 19.383ms 57.08% 19.383ms 11.442us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 1694 -- \n", + " cudaLaunchKernel 1.71% 580.000us 1.71% 580.000us 5.524us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 105 -- \n", + " [memory] 0.00% 0.000us 0.00% 0.000us 0.000us 0.000us 0.00% 0.000us 0.000us -368 b -368 b -275.39 Mb -275.39 Mb 75 -- \n", + " cudaDeviceGetAttribute 0.00% 1.000us 0.00% 1.000us 0.048us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 21 -- \n", + 
"cudaOccupancyMaxActiveBlocksPerMultiprocessorWithFla... 0.02% 6.000us 0.02% 6.000us 0.545us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 11 -- \n", + " cudaPeekAtLastError 0.00% 0.000us 0.00% 0.000us 0.000us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 32 -- \n", + " cudaMemcpyAsync 8.65% 2.937ms 8.65% 2.937ms 48.950us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 60 -- \n", + " cudaStreamSynchronize 0.04% 15.000us 0.04% 15.000us 1.667us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 9 -- \n", + " cudaMemGetInfo 0.21% 73.000us 0.21% 73.000us 36.500us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 2 -- \n", + " cudaDeviceSynchronize 0.02% 6.000us 0.02% 6.000us 6.000us 0.000us 0.00% 0.000us 0.000us 0 b 0 b 0 b 0 b 1 -- \n", + "------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ ------------ \n", + "Self CPU time total: 33.956ms\n", + "Self CUDA time total: 15.307ms\n", + "\n" + ] + } + ], + "source": [ + "with profiler.profile(profile_memory=True, use_cuda=True, with_flops=True) as prof:\n", + " out = clustered_attention_vectorized(q, k, v, centroids, mask)\n", + "\n", + "print(prof.key_averages().table(sort_by=\"self_cuda_time_total\"))" + ] + }, + { + "cell_type": "markdown", + "id": "45736468", + "metadata": {}, + "source": [ + "## Going the extra mile: blockfied attention\n", + "\n", + "BigBird's self-attention relies on two simplifications to get speed improvements over vanilla self-attention:\n", + "\n", + "1. **Local + global + random connections:** Only attend to pre-determined elements, leading to a fixed pattern in the attention matrix\n", + "2. **Blocks:** Group contiguous tokens into chunks, leading to a blockfied pattern in the attention matrix\n", + "\n", + "While the first point is important for attending to relevant elements, the second point is crucial for optimizing runtime. How can we introduce blocks in our implementation? Fortunetally, we can rely (again) on broadcasting to implement a routine that blockfy inputs and another routine to unblockfy the output. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "2c46185f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([2, 4, 1024, 16]), torch.Size([2, 4]))" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "q.shape, q.shape[:-2]" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "92bfa301", + "metadata": {}, + "outputs": [], + "source": [ + "import math\n", + "\n", + "def blockfy(input, block_size=2):\n", + " seq_len = input.shape[-2]\n", + " n_blocks = math.ceil(seq_len / float(block_size))\n", + " # pad so that seq_len becomes divisible by block_size\n", + " # (batch, heads, seq_len, hdim) -> (batch, heads, seq_len + seq_len % block_size, hdim)\n", + " input_pad = torch.nn.functional.pad(input, (0, 0, 0, block_size - seq_len % block_size))\n", + " # reshape to get contiguous chunks\n", + " # (batch, heads, n, hdim) -> (batch, heads, n_blocks, block_size * hdim)\n", + " return input_pad.view(*input_pad.shape[:-2], n_blocks, -1)\n", + "\n", + "\n", + "def unblockfy(output, seq_len, block_size=2):\n", + " n_blocks = output.shape[-2]\n", + " # (batch, heads, n_blocks, block_size * hdim) -> (batch, heads, n_blocks * block_size, hdim)\n", + " output_pad = output.view(batch_size, num_heads, n_blocks * block_size, -1)\n", + " # cut pad out\n", + " return output_pad[:, :, :seq_len]\n", + "\n", + "\n", + "def unblockfy_attn(att_dist, seq_len, block_size=2, pad_mask=None, causal_mask=None):\n", + " # (batch, heads, n_blocks, n_blocks) -> (batch, heads, n_blocks * block_size, n_blocks * block_size)\n", + " att = att_dist.repeat_interleave(block_size, dim=-1).repeat_interleave(block_size, dim=-2)\n", + " # mask out padding and \"future\" positions\n", + " if pad_mask is not None:\n", + " # (batch, seq_len) -> (batch, n_blocks * block_size, n_blocks * block_size)\n", + " pairwise_mask = pad_mask.unsqueeze(-1) & pad_mask.unsqueeze(1)\n", + " # add head dimension\n", + " pairwise_mask = pairwise_mask.unsqueeze(1)\n", + " if causal_mask is not None:\n", + " # add elements of the triu to the mask\n", + " pairwise_mask = pairwise_mask & causal_mask.unsqueeze(0).unsqueeze(1)\n", + " # mask out \n", + " att = att.masked_fill(~pairwise_mask, 0)\n", + " # note that att is not a distribution anymore\n", + " return att[..., :seq_len, :seq_len]" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "id": "4b79ae62", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([2, 4, 1024, 16])\n", + "torch.Size([2, 4, 342, 48])\n", + "torch.Size([2, 4, 1024, 16])\n" + ] + } + ], + "source": [ + "print(q.shape)\n", + "print(blockfy(q, block_size=3).shape)\n", + "print(unblockfy(blockfy(q, block_size=3), seq_len=q.shape[-2], block_size=3).shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "id": "9a5f2d42", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 4, 1024, 1024])" + ] + }, + "execution_count": 64, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "att_dist = torch.randn(2, 4, 342, 342)\n", + "unblockfy_attn(att_dist, seq_len=q.shape[-2], block_size=3).shape" + ] + }, + { + "cell_type": "markdown", + "id": "73e4053a", + "metadata": {}, + "source": [ + "That is! 
We can simply call our attention module with:\n", + "\n", + "```python\n", + "q_block = blockfy(q, block_size=3)\n", + "k_block = blockfy(k, block_size=3)\n", + "v_block = blockfy(v, block_size=3)\n", + "mask_block = blockfy(mask.unsqueeze(-1), block_size=3).any(-1)\n", + "```\n", + "\n", + "And then, at the end, we can reshape the output back to the original sequence length:\n", + "```python\n", + "output = unblockfy(output, seq_len=seq_len, block_size=3)\n", + "# note that mask and causal_mask should be padded to\n", + "# have a length of n_blocks * block_size\n", + "att_dist = unblockfy_attn(att_dist, seq_len=seq_len, block_size=3, pad_mask=mask, causal_mask=causal_mask)\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "0823406b", + "metadata": {}, + "source": [ + "---\n", + "\n", + "# Computing attention statistics\n", + "\n", + "Given that we are working with batches and heads, how can we compute independent statistics for attention maps? For example, we could be interested in computing the amount of sparsity, or the recall when compared with a gold attention pattern. The traditional (and safe) way of doing this would involve flattening the tensors and calling a standard method from a well-tested library. However, we would be ignoring all the power that PyTorch brings us. In fact, cases like this are exactly where PyTorch's broadcasting shines.\n", + "\n", + "To see the difference, let's implement a traditional version and some PyTorch versions of two statistics: **sparsity** and **recall**.\n", + "\n", + "Quick setup:" + ] + }, + { + "cell_type": "code", + "execution_count": 235, + "id": "c7bf471e", + "metadata": {}, + "outputs": [], + "source": [ + "batch_size = 2\n", + "sequence_length = 9\n", + "num_heads = 3\n", + "head_size = 2\n", + "hidden_size = num_heads * head_size\n", + "device = 'cuda' if torch.cuda.is_available() else 'cpu'\n", + "\n", + "att_dist = torch.randn(batch_size, num_heads, sequence_length, sequence_length, device=device).softmax(dim=-1)\n", + "gold_att_dist = torch.randint(0, 2, size=att_dist.shape, device=device)\n", + "mask = torch.tensor([5, 8]).unsqueeze(-1) >= torch.arange(sequence_length).unsqueeze(0).expand(batch_size, -1)\n", + "mask = mask.to(device)" + ] + }, + { + "cell_type": "markdown", + "id": "109b3d60", + "metadata": {}, + "source": [ + "Since softmax always produces nonzero probabilities, let's arbitrarily set some values to zero:" + ] + }, + { + "cell_type": "code", + "execution_count": 236, + "id": "3ee6f526", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.0000, 0.0000, 0.0000, 0.4710, 0.0000, 0.0000, 0.1022, 0.0000, 0.0000],\n", + "        [0.2364, 0.0000, 0.2462, 0.0000, 0.0000, 0.1265, 0.1126, 0.1362, 0.0000],\n", + "        [0.0000, 0.0000, 0.1799, 0.0000, 0.0000, 0.2459, 0.0000, 0.2417, 0.0000],\n", + "        [0.1417, 0.0000, 0.0000, 0.0000, 0.0000, 0.1482, 0.1141, 0.1394, 0.3346],\n", + "        [0.0000, 0.2016, 0.2307, 0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.2868],\n", + "        [0.1814, 0.0000, 0.0000, 0.0000, 0.0000, 0.3844, 0.0000, 0.0000, 0.1693],\n", + "        [0.0000, 0.0000, 0.0000, 0.0000, 0.3962, 0.0000, 0.2924, 0.0000, 0.0000],\n", + "        [0.0000, 0.2786, 0.0000, 0.1485, 0.0000, 0.1361, 0.0000, 0.0000, 0.2319],\n", + "        [0.0000, 0.2886, 0.1361, 0.0000, 0.1505, 0.0000, 0.2319, 0.0000, 0.0000]],\n", + "       device='cuda:0')" + ] + }, + "execution_count": 236, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "att_dist = att_dist.masked_fill(att_dist < 0.1, 0)\n", + "att_dist[0, 0]" + ] + }, + { + "cell_type": "markdown", + "id": "e7187a3d", + "metadata": {}, + "source": [ + "One thing to keep in mind 
is that Transformers' self-attention is masked over the keys (the last dimension) but not over the queries (the second-to-last dimension). In practice this makes no difference for the model, since padding positions are simply ignored for the final task. However, when computing attention statistics we do have to take these padded positions into account. \n", + "\n", + "We will start with raw Python to keep things simple. " + ] + }, + { + "cell_type": "code", + "execution_count": 237, + "id": "74af96cb", + "metadata": {}, + "outputs": [], + "source": [ + "def calc_sparsity_vanilla(att_dist, mask):\n", + "    batch_size, num_heads, _, _ = att_dist.shape\n", + "    p = 0  # positive count\n", + "    n = 0  # total count\n", + "    for i in range(batch_size):\n", + "        valid_seq_len = mask[i].sum().item() \n", + "        n += num_heads * valid_seq_len ** 2\n", + "        for h in range(num_heads):\n", + "            p += sum([int(att_dist[i, h, k1, k2].item() > 0) \n", + "                      for k1 in range(valid_seq_len) \n", + "                      for k2 in range(valid_seq_len)])\n", + "    return 1 - p / n" + ] + }, + { + "cell_type": "code", + "execution_count": 238, + "id": "ccd9393b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([6, 9], device='cuda:0')" + ] + }, + "execution_count": 238, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "mask.sum(-1)" + ] + }, + { + "cell_type": "code", + "execution_count": 239, + "id": "f700c307", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.5982905982905983" + ] + }, + "execution_count": 239, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "calc_sparsity_vanilla(att_dist, mask)" + ] + }, + { + "cell_type": "markdown", + "id": "1e9b59fb", + "metadata": {}, + "source": [ + "Now with PyTorch:" + ] + }, + { + "cell_type": "code", + "execution_count": 240, + "id": "3c15547e", + "metadata": {}, + "outputs": [], + "source": [ + "def calc_sparsity_vectorized(att_dist, mask):\n", + "    pairwise_mask = mask[:, None, :, None] & mask[:, None, None, :]\n", + "    p = (att_dist > 0).masked_fill(~pairwise_mask, False).sum().item()\n", + "    n = num_heads * pairwise_mask.sum().item()\n", + "    return 1 - p / n" + ] + }, + { + "cell_type": "code", + "execution_count": 241, + "id": "1a3e2a70", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.5982905982905983" + ] + }, + "execution_count": 241, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "calc_sparsity_vectorized(att_dist, mask)" + ] + }, + { + "cell_type": "markdown", + "id": "74e2dab8", + "metadata": {}, + "source": [ + "Simple and efficient!\n", + "\n", + "If we were using an encoder-decoder transformer, we would also need to account for causal masks (future position masking). This could be implemented easily as follows:\n", + "\n", + "```python\n", + "pairwise_mask = pairwise_mask & causal_mask[:, None, :, :]\n", + "```\n", + "\n", + "Now let's turn to the other statistic: recall. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 303, + "id": "46cde6af", + "metadata": {}, + "outputs": [], + "source": [ + "def calc_recall_vanilla(gold_att_dist, pred_att_dist, mask):\n", + " from sklearn.metrics import recall_score\n", + " batch_size, num_heads, _, _ = pred_att_dist.shape\n", + " recalls = torch.zeros(batch_size, num_heads)\n", + " for i in range(batch_size):\n", + " valid_seq_len = mask[i].sum().item() \n", + " for h in range(num_heads):\n", + " g = (gold_att_dist[i, h, :valid_seq_len, :valid_seq_len] > 0).long().flatten().tolist()\n", + " p = (pred_att_dist[i, h, :valid_seq_len, :valid_seq_len] > 0).long().flatten().tolist()\n", + " recalls[i, h] = recall_score(g, p)\n", + " return recalls.mean() # macro-averaged" + ] + }, + { + "cell_type": "code", + "execution_count": 304, + "id": "6ff62d17", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(0.4218)" + ] + }, + "execution_count": 304, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "calc_recall_vanilla(gold_att_dist, att_dist, mask)" + ] + }, + { + "cell_type": "code", + "execution_count": 305, + "id": "eb0156f7", + "metadata": {}, + "outputs": [], + "source": [ + "def calc_recall_vectorized(gold_att_dist, pred_att_dist, mask):\n", + " pairwise_mask = mask[:, None, :, None] & mask[:, None, None, :]\n", + " g = (gold_att_dist > 0).masked_fill(~pairwise_mask, False)\n", + " p = (pred_att_dist > 0).masked_fill(~pairwise_mask, False)\n", + " matches_per_head_and_batch = (p & g).sum(-1).sum(-1).float() / g.sum(-1).sum(-1).float()\n", + " return matches_per_head_and_batch.mean() # macro-averaged" + ] + }, + { + "cell_type": "code", + "execution_count": 306, + "id": "0850e180", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(0.4218, device='cuda:0')" + ] + }, + "execution_count": 306, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "calc_recall_vectorized(gold_att_dist, att_dist, mask)" + ] + }, + { + "cell_type": "markdown", + "id": "1a2551b2", + "metadata": {}, + "source": [ + "That is it! 
To wrap up, let's write one more statistic function: a method that returns the fraction of fully recovered predictions, i.e., the fraction of predicted attention distributions with 100% recall.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 313, + "id": "ddea4d05", + "metadata": {}, + "outputs": [], + "source": [ + "def compute_exact_fraction(gold_att_dist, pred_att_dist, mask):\n", + "    pairwise_mask = mask[:, None, :, None] & mask[:, None, None, :]\n", + "    g = (gold_att_dist > 0).masked_fill(~pairwise_mask, False)\n", + "    p = (pred_att_dist > 0).masked_fill(~pairwise_mask, False)\n", + "    matches = p & g\n", + "    matches_per_query = matches.sum(-1).float()\n", + "    total_per_query = g.sum(-1).float()\n", + "    # might get nans due to zero division\n", + "    recall_per_query = matches_per_query / total_per_query\n", + "    exact_per_query = recall_per_query == 1.0\n", + "    # filter nans out\n", + "    valid_exact_per_query = exact_per_query.masked_fill(~mask[:, None, :], False)\n", + "    lengths = mask.sum(-1).unsqueeze(-1).float()\n", + "    exact_per_head_and_batch = valid_exact_per_query.sum(-1) / lengths\n", + "    return exact_per_head_and_batch.mean()  # macro-averaged" + ] + }, + { + "cell_type": "code", + "execution_count": 314, + "id": "1e3ad95a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(0.0741, device='cuda:0')" + ] + }, + "execution_count": 314, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "compute_exact_fraction(gold_att_dist, att_dist, mask)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/challenges-for-true-pytorch-heroes-solutions.ipynb b/challenges-for-true-pytorch-heroes-solutions.ipynb new file mode 100644 index 0000000..d6dc590 --- /dev/null +++ b/challenges-for-true-pytorch-heroes-solutions.ipynb @@ -0,0 +1,950 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PyTorch Challenges\n", + "\n", + "This set of challenges is concerned with broadcasting, one of the key concepts when dealing with tensors.\n", + "\n", + "[Sasha Rush](https://twitter.com/srush_nlp) compiled a set of [16 Tensor mini-puzzles](https://github.com/srush/Tensor-Puzzles) that involve reasoning about broadcasting in a constrained setting: people are allowed to use only a single PyTorch function: `torch.arange`. Can you do it?\n", + "\n", + "Here, I've extended his list to 27 puzzles! \n", + "\n", + "**Rules**\n", + "\n", + "- Each puzzle needs to be solved in 1 line (<80 columns) of code.\n", + "- You are allowed @, arithmetic, comparison, shape, any indexing (e.g. `a[:j], a[:, None], a[arange(10)]`), and previous puzzle functions.\n", + "- To start off, we give you an implementation for the `torch.arange` function.\n", + "\n", + "**Anti-Rules**\n", + "- Nothing else. No `.view, .sum, .take, .squeeze, .tensor`.\n", + "- No cheating. Stackoverflow is great, but this is about first-principles.\n", + "- Hint... these puzzles are mostly about [Broadcasting](https://pytorch.org/docs/master/notes/broadcasting.html). 
Make sure you understand this rule.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 169, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from spec import make_test, run_test, TT" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### arange\n", + "\n", + "This is given for free! Think about it as a \"for-loop\"" + ] + }, + { + "cell_type": "code", + "execution_count": 170, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 3, 4, 5])" + ] + }, + "execution_count": 170, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def arange(i: int):\n", + " return torch.arange(i)\n", + "\n", + "arange(6)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### where" + ] + }, + { + "cell_type": "code", + "execution_count": 171, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 0, -1, 2, -1])" + ] + }, + "execution_count": 171, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def where(q, a, b):\n", + " return q * a + (~q) * b\n", + "\n", + "where(arange(4) % 2 == 0, arange(4), -1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### ones" + ] + }, + { + "cell_type": "code", + "execution_count": 172, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1, 1, 1, 1])" + ] + }, + "execution_count": 172, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def ones(i: int):\n", + " return where(arange(i) >= 0, 1, 0)\n", + "\n", + "ones(4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### sum" + ] + }, + { + "cell_type": "code", + "execution_count": 173, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(6)" + ] + }, + "execution_count": 173, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def sum(a: torch.Tensor):\n", + " return ones(a.shape[0]) @ a\n", + "\n", + "sum(arange(4))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### outer" + ] + }, + { + "cell_type": "code", + "execution_count": 174, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 0, 0],\n", + " [1, 1, 1],\n", + " [2, 2, 2],\n", + " [3, 3, 3]])" + ] + }, + "execution_count": 174, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def outer(a: torch.Tensor, b: torch.Tensor):\n", + " return a[:, None] * b[None, :]\n", + "\n", + "outer(arange(4), ones(3))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### diag" + ] + }, + { + "cell_type": "code", + "execution_count": 175, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 3])" + ] + }, + "execution_count": 175, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def diag(a: torch.Tensor):\n", + " return a[arange(a.shape[0]), arange(a.shape[0])]\n", + "\n", + "diag(outer(arange(4), ones(4)))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### eye" + ] + }, + { + "cell_type": "code", + "execution_count": 176, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1, 0, 0, 
0],\n", + " [0, 1, 0, 0],\n", + " [0, 0, 1, 0],\n", + " [0, 0, 0, 1]])" + ] + }, + "execution_count": 176, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def eye(j: int):\n", + " return (arange(j)[:, None] == arange(j)[None, :]) * 1\n", + "\n", + "eye(4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### triu" + ] + }, + { + "cell_type": "code", + "execution_count": 177, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1, 1, 1, 1],\n", + " [0, 1, 1, 1],\n", + " [0, 0, 1, 1],\n", + " [0, 0, 0, 1]])" + ] + }, + "execution_count": 177, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def triu(j: int):\n", + " return (arange(j)[:,None] <= arange(j))*1\n", + "\n", + "triu(4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### cumsum" + ] + }, + { + "cell_type": "code", + "execution_count": 178, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 3, 6])" + ] + }, + "execution_count": 178, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def cumsum(a: torch.Tensor):\n", + " return (outer(ones(a.shape[0]), a) @ triu(a.shape[0]))[0]\n", + "\n", + "cumsum(torch.arange(4))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### diff" + ] + }, + { + "cell_type": "code", + "execution_count": 179, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 1, 1])" + ] + }, + "execution_count": 179, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def diff(a: torch.Tensor, i: int):\n", + " return a - a[where(arange(i) > 0, arange(i)-1, 0)] + (a*(arange(i) <= 0))\n", + "\n", + "diff(arange(4), 4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### vstack" + ] + }, + { + "cell_type": "code", + "execution_count": 180, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 1, 2, 3],\n", + " [1, 1, 1, 1]])" + ] + }, + "execution_count": 180, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def vstack(a: torch.Tensor, b: torch.Tensor):\n", + " return a * (1-arange(2)[:, None]) + b * arange(2)[:, None]\n", + "\n", + "vstack(arange(4), ones(4))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### roll" + ] + }, + { + "cell_type": "code", + "execution_count": 181, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1, 2, 3, 0])" + ] + }, + "execution_count": 181, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def roll(a: torch.Tensor, i: int):\n", + " return a[(arange(i) + 1) * ((arange(i) + 1) < i)]\n", + "\n", + "roll(arange(4), 4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### flip" + ] + }, + { + "cell_type": "code", + "execution_count": 182, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([3, 2, 1, 0])" + ] + }, + "execution_count": 182, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def flip(a: torch.Tensor, i: int):\n", + " return a[i - arange(i) - 1]\n", + "\n", + "flip(arange(4), 4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### compress" + ] + }, + { + "cell_type": "code", + "execution_count": 183, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1, 2, 0])" + ] + }, + "execution_count": 183, + "metadata": 
{}, + "output_type": "execute_result" + } + ], + "source": [ + "def compress(g: torch.Tensor, v: torch.Tensor, i: int):\n", + " return sum(eye(i)[:sum(g*1)] * outer(v[g], ones(i)))\n", + "\n", + "compress(torch.tensor([False, True, True]), arange(3), 3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### pad_to" + ] + }, + { + "cell_type": "code", + "execution_count": 184, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 0, 0])" + ] + }, + "execution_count": 184, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def pad_to(a: torch.Tensor, i: int, j: int):\n", + " return sum((arange(i)[:, None] == arange(j)[None, :]) * a[:, None])\n", + "\n", + "pad_to(arange(3), 3, 5)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### sequence_mask" + ] + }, + { + "cell_type": "code", + "execution_count": 185, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1, 1, 0],\n", + " [1, 1, 0],\n", + " [1, 0, 0],\n", + " [1, 1, 1]])" + ] + }, + "execution_count": 185, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def sequence_mask(values: torch.Tensor, length: torch.Tensor):\n", + " return values * (length[:, None] > arange(values.shape[-1])[None, :])\n", + "\n", + "sequence_mask(outer(ones(4), ones(3)), torch.tensor([2,2,1,3]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### bincount" + ] + }, + { + "cell_type": "code", + "execution_count": 186, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1, 3, 4, 2])" + ] + }, + "execution_count": 186, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def bincount(a: torch.Tensor, j: int):\n", + " return ones(len(a)) @ ((a[:, None] == arange(j)[None, :]) * 1)\n", + "\n", + "bincount(torch.tensor([2, 1, 3, 3, 1, 2, 2, 2, 1, 0]), 4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### scatter_add" + ] + }, + { + "cell_type": "code", + "execution_count": 187, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([8, 7, 5, 4])" + ] + }, + "execution_count": 187, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def scatter_add(values: torch.Tensor, link: torch.Tensor, j: int):\n", + " return sum((link[:, None] == arange(j)[None, :]) * outer(values, ones(j)))\n", + "\n", + "scatter_add(torch.tensor([5,1,7,2,3,2,1,3]), torch.tensor([0,0,1,0,2,2,3,3]), 4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### flatten" + ] + }, + { + "cell_type": "code", + "execution_count": 189, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])" + ] + }, + "execution_count": 189, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def flatten(a: torch.Tensor, i:int, j:int):\n", + " return a[outer(ones(i), ones(j)) == 1]\n", + "\n", + "flatten(arange(16).view(4, 4), 4, 4)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### linspace" + ] + }, + { + "cell_type": "code", + "execution_count": 190, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0.0000, 0.1111, 0.2222, 0.3333, 0.4444, 0.5556, 0.6667, 0.7778, 0.8889,\n", + " 1.0000])" + ] + }, + "execution_count": 190, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def linspace(i: float, 
j: float, n: int):\n", + " return i + (j - i) * arange(n) / max(1, (n - 1))\n", + "\n", + "linspace(0, 1, 10)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### heaviside" + ] + }, + { + "cell_type": "code", + "execution_count": 191, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 1.0000, -2.6444, 0.0000])" + ] + }, + "execution_count": 191, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def heaviside(a: torch.Tensor, b: torch.Tensor):\n", + " return (a > 0) + (a == 0) * b\n", + "\n", + "heaviside(torch.tensor([1, 0, -2]), torch.randn(3))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### hstack" + ] + }, + { + "cell_type": "code", + "execution_count": 192, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 1],\n", + " [1, 1],\n", + " [2, 1]])" + ] + }, + "execution_count": 192, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def hstack(a: torch.Tensor, b: torch.Tensor):\n", + " return a[:,None] * eye(2)[0] + b[:,None] * eye(2)[1]\n", + "\n", + "hstack(arange(3), ones(3))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### view (1d to 2d)" + ] + }, + { + "cell_type": "code", + "execution_count": 193, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 1],\n", + " [2, 3],\n", + " [4, 5]])" + ] + }, + "execution_count": 193, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def view(a: torch.Tensor, i: int, j: int):\n", + " return a[(j * arange(i)[:,None] + arange(j)[None]) % len(a)][:i, :j]\n", + "\n", + "view(arange(6), 3, 2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### repeat (1d)" + ] + }, + { + "cell_type": "code", + "execution_count": 194, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 0, 1, 2, 3, 4])" + ] + }, + "execution_count": 194, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def repeat(a: torch.Tensor, d: int):\n", + " return (ones(d)[:, None] * a)[outer(ones(d), ones(len(a))) == 1]\n", + "\n", + "repeat(arange(5), 3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### repeat_interleave (1d)" + ] + }, + { + "cell_type": "code", + "execution_count": 195, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])" + ] + }, + "execution_count": 195, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def repeat_interleave(a: torch.Tensor, d: int):\n", + " return (ones(d)[:, None] * a).T[outer(ones(len(a)), ones(d)) == 1]\n", + "\n", + "repeat_interleave(arange(5), 3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### chunk" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[tensor([0, 1]),\n", + " tensor([2, 3]),\n", + " tensor([4, 5]),\n", + " tensor([6, 7]),\n", + " tensor([8, 9]),\n", + " tensor([10, 11])]" + ] + }, + "execution_count": 198, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def chunk(a: torch.Tensor, c: int):\n", + " return list(view(a, c, len(a)//c))\n", + "\n", + "chunk(torch.arange(12), 6)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### nonzero" + ] + }, + { + 
"cell_type": "code", + "execution_count": 200, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 0],\n", + " [1, 1],\n", + " [2, 2]])" + ] + }, + "execution_count": 200, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def nonzero(a: torch.Tensor, i: int, j: int):\n", + " return hstack(outer(arange(i),ones(j))[a!=0],outer(ones(i),arange(j))[a!=0])\n", + "\n", + "nonzero(eye(3), 3, 3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### bucketize" + ] + }, + { + "cell_type": "code", + "execution_count": 201, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1, 3, 4])" + ] + }, + "execution_count": 201, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def bucketize(v: torch.Tensor, boundaries: torch.Tensor):\n", + " return sum((v[:,None] > boundaries[None, :]).T * 1)\n", + "\n", + "bucketize(torch.tensor([3, 6, 9]), torch.tensor([1, 3, 5, 7, 9]))" + ] + } + ], + "metadata": { + "anaconda-cloud": {}, + "celltoolbar": "Raw Cell Format", + "jupytext": { + "formats": "ipynb,py:percent" + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/challenges-for-true-pytorch-heroes.ipynb b/challenges-for-true-pytorch-heroes.ipynb index 705ee7f..b15be79 100644 --- a/challenges-for-true-pytorch-heroes.ipynb +++ b/challenges-for-true-pytorch-heroes.ipynb @@ -794,6 +794,9 @@ "metadata": { "anaconda-cloud": {}, "celltoolbar": "Raw Cell Format", + "jupytext": { + "formats": "ipynb,py:percent" + }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", From b756dafbf2bf7c90e1a706aa0849cefad016434b Mon Sep 17 00:00:00 2001 From: Marcos Treviso Date: Sun, 8 May 2022 07:00:42 +0100 Subject: [PATCH 17/18] Update README.md --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index da1d114..0ad1062 100644 --- a/README.md +++ b/README.md @@ -16,9 +16,11 @@ Throughout this course we will be using: [Lecture 3](https://github.com/mtreviso/pytorch-lecture/blob/master/03-modules-and-mlps.ipynb): PyTorch `nn.Modules` alongside training and evaluation loop
[Lecture 4](https://github.com/mtreviso/pytorch-lecture/blob/master/04-optional-word2vec.ipynb): Implementation of a proof-of-concept Word2Vec in PyTorch
-⏳⏳⏳ [Bonus](https://github.com/mtreviso/pytorch-lecture/blob/master/bonus-computational-efficiency.ipynb): Comparison of the computation efficiency between raw Python, Numpy, and PyTorch (with and without JIT) +⏳ [Bonus](https://github.com/mtreviso/pytorch-lecture/blob/master/bonus-computational-efficiency.ipynb): Comparison of the computation efficiency between raw Python, Numpy, and PyTorch (with and without JIT)
-🔥🔥🔥 [PyTorch Challenges](https://github.com/mtreviso/pytorch-lecture/blob/master/challenges-for-true-pytorch-heroes.ipynb): a set of 27 mini-puzzles 🧩 (extension of the ones proposed by [Sasha Rush](https://github.com/srush/Tensor-Puzzles)) +🔥 [PyTorch Challenges](https://github.com/mtreviso/pytorch-lecture/blob/master/challenges-for-true-pytorch-heroes.ipynb): a set of 27 mini-puzzles (extension of the ones proposed by [Sasha Rush](https://github.com/srush/Tensor-Puzzles)) +
+
+🌎 [From Puzzles to Real Code](https://github.com/mtreviso/pytorch-lecture/blob/master/broadcasting_real_examples.ipynb): Examples of broadcasting in real-world applications: **wordpiece aggregation**, **clustered attention**, and **attention statistics**.

# Installation

From 8b348fe95ec3c1157c37cacbb8bd71894bd17895 Mon Sep 17 00:00:00 2001
From: Marcos Treviso
Date: Sun, 8 May 2022 07:01:09 +0100
Subject: [PATCH 18/18] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 0ad1062..ee499dc 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ Throughout this course we will be using:
[Lecture 3](https://github.com/mtreviso/pytorch-lecture/blob/master/03-modules-and-mlps.ipynb): PyTorch `nn.Modules` alongside training and evaluation loop
[Lecture 4](https://github.com/mtreviso/pytorch-lecture/blob/master/04-optional-word2vec.ipynb): Implementation of a proof-of-concept Word2Vec in PyTorch
-⏳ [Bonus](https://github.com/mtreviso/pytorch-lecture/blob/master/bonus-computational-efficiency.ipynb): Comparison of the computation efficiency between raw Python, Numpy, and PyTorch (with and without JIT) +⏳ [Bonus](https://github.com/mtreviso/pytorch-lecture/blob/master/bonus-computational-efficiency.ipynb): Comparison of the computation efficiency between raw Python, Numpy, and PyTorch (+JIT)
🔥 [PyTorch Challenges](https://github.com/mtreviso/pytorch-lecture/blob/master/challenges-for-true-pytorch-heroes.ipynb): a set of 27 mini-puzzles (extension of the ones proposed by [Sasha Rush](https://github.com/srush/Tensor-Puzzles))