diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/README.md" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/README.md" index c725654..d00120d 100644 --- "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/README.md" +++ "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/README.md" @@ -1 +1 @@ -# 경쟁을 통해 성장하는 GAN +# 경쟁하며 학습하는 GAN diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.ipynb" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.ipynb" index 6d59e60..076a436 100644 --- "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.ipynb" +++ "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.ipynb" @@ -24,26 +24,6 @@ "import numpy as np" ] }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "torch.manual_seed(1) # reproducible" - ] - }, { "cell_type": "code", "execution_count": 3, @@ -58,7 +38,7 @@ } ], "source": [ - "# Hyper Parameters\n", + "# 하이퍼파라미터\n", "EPOCHS = 300\n", "BATCH_SIZE = 100\n", "USE_CUDA = torch.cuda.is_available()\n", @@ -72,7 +52,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Fashion MNIST digits dataset\n", + "# Fashion MNIST 데이터셋\n", "trainset = datasets.FashionMNIST('./.data',\n", " train=True,\n", " download=True,\n", @@ -103,7 +83,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Discriminator\n", + "# 판별자 
(Discriminator)\n", "D = nn.Sequential(\n", " nn.Linear(784, 256),\n", " nn.LeakyReLU(0.2),\n", @@ -119,7 +99,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Generator \n", + "# 생성자 (Generator)\n", "G = nn.Sequential(\n", " nn.Linear(64 + 10, 256),\n", " nn.ReLU(),\n", @@ -135,12 +115,12 @@ "metadata": {}, "outputs": [], "source": [ - "\n", - "# Device setting\n", + "# 모델의 가중치를 지정한 장치로 보내기\n", "D = D.to(DEVICE)\n", "G = G.to(DEVICE)\n", "\n", - "# Binary cross entropy loss and optimizer\n", + "# 이진 크로스 엔트로피 (Binary cross entropy) 오차 함수와\n", + "# 생성자와 판별자를 최적화할 Adam 모듈\n", "criterion = nn.BCELoss()\n", "d_optimizer = optim.Adam(D.parameters(), lr=0.0002)\n", "g_optimizer = optim.Adam(G.parameters(), lr=0.0002)" ] }, @@ -1156,16 +1136,13 @@ " g_loss.backward()\n", " g_optimizer.step()\n", " \n", - " if (i+1) % 200 == 0:\n", - " print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' \n", - " .format(epoch,\n", - " EPOCHS,\n", - " i+1,\n", - " total_step,\n", - " d_loss.item(),\n", - " g_loss.item(), \n", - " real_score.mean().item(),\n", - " fake_score.mean().item()))" + " print('Epoch [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' \n", + " .format(epoch,\n", + " EPOCHS,\n", + " d_loss.item(),\n", + " g_loss.item(), \n", + " real_score.mean().item(),\n", + " fake_score.mean().item()))" ] }, { diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.py" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.py" index f803c01..0ae86d0 100644 --- "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.py" +++ "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/conditional_gan.py" @@ -14,10 +14,7 @@ import numpy as np 
-torch.manual_seed(1) # reproducible - - -# Hyper Parameters +# 하이퍼파라미터 EPOCHS = 300 BATCH_SIZE = 100 USE_CUDA = torch.cuda.is_available() @@ -25,7 +22,7 @@ print("Using Device:", DEVICE) -# Fashion MNIST digits dataset +# Fashion MNIST 데이터셋 trainset = datasets.FashionMNIST('./.data', train=True, download=True, @@ -44,7 +41,7 @@ def one_hot_embedding(labels, num_classes): return y[labels] -# Discriminator +# 판별자 (Discriminator) D = nn.Sequential( nn.Linear(784, 256), nn.LeakyReLU(0.2), @@ -54,7 +51,7 @@ def one_hot_embedding(labels, num_classes): nn.Sigmoid()) -# Generator +# 생성자 (Generator) G = nn.Sequential( nn.Linear(64 + 10, 256), nn.ReLU(), @@ -64,12 +61,12 @@ def one_hot_embedding(labels, num_classes): nn.Tanh()) - -# Device setting +# 모델의 가중치를 지정한 장치로 보내기 D = D.to(DEVICE) G = G.to(DEVICE) -# Binary cross entropy loss and optimizer +# 이진 크로스 엔트로피 (Binary cross entropy) 오차 함수와 +# 생성자와 판별자를 최적화할 Adam 모듈 criterion = nn.BCELoss() d_optimizer = optim.Adam(D.parameters(), lr=0.0002) g_optimizer = optim.Adam(G.parameters(), lr=0.0002) @@ -118,16 +115,13 @@ def one_hot_embedding(labels, num_classes): g_loss.backward() g_optimizer.step() - if (i+1) % 200 == 0: - print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' - .format(epoch, - EPOCHS, - i+1, - total_step, - d_loss.item(), - g_loss.item(), - real_score.mean().item(), - fake_score.mean().item())) + print('Epoch [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' + .format(epoch, + EPOCHS, + d_loss.item(), + g_loss.item(), + real_score.mean().item(), + fake_score.mean().item())) for i in range(100): diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.ipynb" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.ipynb" index 71781e2..fcabd51 100644 --- 
"a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.ipynb" +++ "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.ipynb" @@ -4,21 +4,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# 9.1 GAN으로 새로운 패션아이템 생성하기\n", + "# GAN으로 새로운 패션아이템 생성하기\n", "*GAN을 이용하여 새로운 패션 아이템을 만들어봅니다*\n", "\n", - "GAN을 구현하기 위해 그 구조를 더 자세히 알아보겠습니다.\n", + "이 프로젝트는 최윤제님의 파이토치 튜토리얼 사용 허락을 받아 참고했습니다.\n", "\n", - "GAN은 생성자(Generator)와 판별자(Discriminator) 2개의 신경망으로\n", - "이루어져 있습니다.\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## GAN 구현하기" + "* [yunjey/pytorch-tutorial](https://github.com/yunjey/pytorch-tutorial) - MIT License" ] }, { @@ -33,32 +24,21 @@ "import torch.nn as nn\n", "import torch.optim as optim\n", "from torchvision import transforms, datasets\n", - "from torchvision.utils import save_image" + "from torchvision.utils import save_image\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np" ] }, { - "cell_type": "code", - "execution_count": 2, + "cell_type": "markdown", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ - "torch.manual_seed(1) # reproducible" + "EPOCHS 과 BATCH_SIZE 등 학습에 필요한 하이퍼 파라미터 들을 설정해 줍니다." ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -70,121 +50,28 @@ } ], "source": [ - "# Hyper Parameters\n", - "EPOCHS = 100\n", + "# 하이퍼파라미터\n", + "EPOCHS = 500\n", "BATCH_SIZE = 100\n", "USE_CUDA = torch.cuda.is_available()\n", "DEVICE = torch.device(\"cuda\" if USE_CUDA else \"cpu\")\n", "print(\"Using Device:\", DEVICE)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "학습에 필요한 데이터셋을 로딩합니다. 
" + ] + }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - "0it [00:00, ?it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz to ./.data/FashionMNIST/raw/train-images-idx3-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "26427392it [00:08, 3068511.07it/s] \n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./.data/FashionMNIST/raw/train-images-idx3-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - "0it [00:00, ?it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz to ./.data/FashionMNIST/raw/train-labels-idx1-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "32768it [00:00, 46015.88it/s] \n", - "0it [00:00, ?it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./.data/FashionMNIST/raw/train-labels-idx1-ubyte.gz\n", - "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz to ./.data/FashionMNIST/raw/t10k-images-idx3-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "4423680it [00:02, 1666677.48it/s] \n", - "0it [00:00, ?it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./.data/FashionMNIST/raw/t10k-images-idx3-ubyte.gz\n", - "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz to ./.data/FashionMNIST/raw/t10k-labels-idx1-ubyte.gz\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "8192it [00:00, 21224.52it/s] " 
- ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Extracting ./.data/FashionMNIST/raw/t10k-labels-idx1-ubyte.gz\n", - "Processing...\n", - "Done!\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], + "outputs": [], "source": [ - "# Fashion MNIST digits dataset\n", + "# Fashion MNIST 데이터셋\n", "trainset = datasets.FashionMNIST('./.data',\n", " train=True,\n", " download=True,\n", @@ -198,13 +85,43 @@ " shuffle = True)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "생성자는 64차원의 랜덤한 텐서를 입력받아 이에 행렬곱(Linear)과 활성화 함수(ReLU, Tanh) 연산을 실행합니다. 생성자의 결과값은 784차원, 즉 Fashion MNIST 속의 이미지와 같은 차원의 텐서입니다." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# 생성자 (Generator)\n", + "G = nn.Sequential(\n", + " nn.Linear(64, 256),\n", + " nn.ReLU(),\n", + " nn.Linear(256, 256),\n", + " nn.ReLU(),\n", + " nn.Linear(256, 784),\n", + " nn.Tanh())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "판별자는 784차원의 텐서를 입력받습니다. 판별자 역시 입력된 데이터에 행렬곱과 활성화 함수를 실행시키지만, 생성자와 달리 판별자의 결과값은 입력받은 텐서가 진짜인지 구분하는 예측값입니다." + ] + }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ - "# Discriminator\n", + "# 판별자 (Discriminator)\n", "D = nn.Sequential(\n", " nn.Linear(784, 256),\n", " nn.LeakyReLU(0.2),\n", @@ -215,365 +132,569 @@ ] }, { - "cell_type": "code", - "execution_count": 6, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# Generator \n", - "G = nn.Sequential(\n", - " nn.Linear(64, 256),\n", - " nn.ReLU(),\n", - " nn.Linear(256, 256),\n", - " nn.ReLU(),\n", - " nn.Linear(256, 784),\n", - " nn.Tanh())" + "생성자와 판별자 학습에 쓰일 오차 함수와 최적화 알고리즘도 정의해 줍니다." 
] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ - "\n", - "# Device setting\n", + "# 모델의 가중치를 지정한 장치로 보내기\n", "D = D.to(DEVICE)\n", "G = G.to(DEVICE)\n", "\n", - "# Binary cross entropy loss and optimizer\n", + "# 이진 크로스 엔트로피 (Binary cross entropy) 오차 함수와\n", + "# 생성자와 판별자를 최적화할 Adam 모듈\n", "criterion = nn.BCELoss()\n", "d_optimizer = optim.Adam(D.parameters(), lr=0.0002)\n", "g_optimizer = optim.Adam(G.parameters(), lr=0.0002)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "모델 학습에 필요한 준비는 끝났습니다. 그럼 본격적으로 GAN을 학습시키는 loop을 만들어 보겠습니다. " + ] + }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [0/100], Step [200/600], d_loss: 0.0787, g_loss: 4.1506, D(x): 0.98, D(G(z)): 0.06\n", - "Epoch [0/100], Step [400/600], d_loss: 0.2156, g_loss: 4.7861, D(x): 0.93, D(G(z)): 0.10\n", - "Epoch [0/100], Step [600/600], d_loss: 0.0326, g_loss: 5.2619, D(x): 0.99, D(G(z)): 0.02\n", - "Epoch [1/100], Step [200/600], d_loss: 0.0656, g_loss: 5.0974, D(x): 0.99, D(G(z)): 0.03\n", - "Epoch [1/100], Step [400/600], d_loss: 0.1571, g_loss: 3.6610, D(x): 0.95, D(G(z)): 0.07\n", - "Epoch [1/100], Step [600/600], d_loss: 0.0500, g_loss: 4.5240, D(x): 0.99, D(G(z)): 0.03\n", - "Epoch [2/100], Step [200/600], d_loss: 0.0376, g_loss: 6.1814, D(x): 0.98, D(G(z)): 0.01\n", - "Epoch [2/100], Step [400/600], d_loss: 0.0241, g_loss: 6.5856, D(x): 0.99, D(G(z)): 0.01\n", - "Epoch [2/100], Step [600/600], d_loss: 0.1581, g_loss: 6.0980, D(x): 0.96, D(G(z)): 0.02\n", - "Epoch [3/100], Step [200/600], d_loss: 0.0641, g_loss: 6.9642, D(x): 0.97, D(G(z)): 0.00\n", - "Epoch [3/100], Step [400/600], d_loss: 0.1090, g_loss: 4.8299, D(x): 0.96, D(G(z)): 0.02\n", - "Epoch [3/100], Step [600/600], d_loss: 0.0313, g_loss: 6.9685, D(x): 0.99, D(G(z)): 0.02\n", - "Epoch [4/100], Step [200/600], d_loss: 
0.0850, g_loss: 4.4195, D(x): 0.97, D(G(z)): 0.02\n", - "Epoch [4/100], Step [400/600], d_loss: 0.1303, g_loss: 4.9543, D(x): 0.97, D(G(z)): 0.06\n", - "Epoch [4/100], Step [600/600], d_loss: 0.1894, g_loss: 5.1442, D(x): 0.94, D(G(z)): 0.04\n", - "Epoch [5/100], Step [200/600], d_loss: 0.2450, g_loss: 6.8410, D(x): 0.93, D(G(z)): 0.05\n", - "Epoch [5/100], Step [400/600], d_loss: 0.3553, g_loss: 4.7668, D(x): 0.95, D(G(z)): 0.15\n", - "Epoch [5/100], Step [600/600], d_loss: 0.2118, g_loss: 3.4650, D(x): 0.92, D(G(z)): 0.06\n", - "Epoch [6/100], Step [200/600], d_loss: 0.2083, g_loss: 3.1407, D(x): 0.95, D(G(z)): 0.10\n", - "Epoch [6/100], Step [400/600], d_loss: 0.1540, g_loss: 6.3076, D(x): 0.95, D(G(z)): 0.03\n", - "Epoch [6/100], Step [600/600], d_loss: 0.1092, g_loss: 5.9090, D(x): 0.95, D(G(z)): 0.01\n", - "Epoch [7/100], Step [200/600], d_loss: 0.0912, g_loss: 4.4518, D(x): 0.98, D(G(z)): 0.05\n", - "Epoch [7/100], Step [400/600], d_loss: 0.1132, g_loss: 4.1672, D(x): 0.96, D(G(z)): 0.04\n", - "Epoch [7/100], Step [600/600], d_loss: 0.1899, g_loss: 4.1865, D(x): 0.98, D(G(z)): 0.14\n", - "Epoch [8/100], Step [200/600], d_loss: 0.1007, g_loss: 5.8568, D(x): 0.96, D(G(z)): 0.02\n", - "Epoch [8/100], Step [400/600], d_loss: 0.2294, g_loss: 4.5870, D(x): 0.95, D(G(z)): 0.05\n", - "Epoch [8/100], Step [600/600], d_loss: 0.1160, g_loss: 6.2999, D(x): 0.98, D(G(z)): 0.02\n", - "Epoch [9/100], Step [200/600], d_loss: 0.1227, g_loss: 3.7400, D(x): 0.96, D(G(z)): 0.05\n", - "Epoch [9/100], Step [400/600], d_loss: 0.1638, g_loss: 4.2894, D(x): 0.96, D(G(z)): 0.08\n", - "Epoch [9/100], Step [600/600], d_loss: 0.1253, g_loss: 3.7274, D(x): 0.94, D(G(z)): 0.03\n", - "Epoch [10/100], Step [200/600], d_loss: 0.1965, g_loss: 4.3236, D(x): 0.97, D(G(z)): 0.08\n", - "Epoch [10/100], Step [400/600], d_loss: 0.2227, g_loss: 4.4192, D(x): 0.94, D(G(z)): 0.07\n", - "Epoch [10/100], Step [600/600], d_loss: 0.1983, g_loss: 4.7957, D(x): 0.94, D(G(z)): 0.04\n", - "Epoch [11/100], 
Step [200/600], d_loss: 0.2676, g_loss: 7.7167, D(x): 0.94, D(G(z)): 0.02\n", - "Epoch [11/100], Step [400/600], d_loss: 0.1928, g_loss: 3.9627, D(x): 0.94, D(G(z)): 0.08\n", - "Epoch [11/100], Step [600/600], d_loss: 0.2125, g_loss: 3.9180, D(x): 0.93, D(G(z)): 0.07\n", - "Epoch [12/100], Step [200/600], d_loss: 0.2259, g_loss: 4.4337, D(x): 0.93, D(G(z)): 0.07\n", - "Epoch [12/100], Step [400/600], d_loss: 0.1749, g_loss: 4.6158, D(x): 0.96, D(G(z)): 0.07\n", - "Epoch [12/100], Step [600/600], d_loss: 0.4265, g_loss: 4.6571, D(x): 0.91, D(G(z)): 0.13\n", - "Epoch [13/100], Step [200/600], d_loss: 0.4758, g_loss: 3.3905, D(x): 0.90, D(G(z)): 0.10\n", - "Epoch [13/100], Step [400/600], d_loss: 0.3001, g_loss: 4.7539, D(x): 0.92, D(G(z)): 0.06\n", - "Epoch [13/100], Step [600/600], d_loss: 0.3869, g_loss: 4.1512, D(x): 0.87, D(G(z)): 0.07\n", - "Epoch [14/100], Step [200/600], d_loss: 0.1500, g_loss: 3.9485, D(x): 0.93, D(G(z)): 0.04\n", - "Epoch [14/100], Step [400/600], d_loss: 0.3522, g_loss: 4.3756, D(x): 0.92, D(G(z)): 0.09\n", - "Epoch [14/100], Step [600/600], d_loss: 0.3103, g_loss: 2.1960, D(x): 0.93, D(G(z)): 0.13\n", - "Epoch [15/100], Step [200/600], d_loss: 0.3663, g_loss: 3.6767, D(x): 0.92, D(G(z)): 0.12\n", - "Epoch [15/100], Step [400/600], d_loss: 0.3098, g_loss: 4.2520, D(x): 0.88, D(G(z)): 0.10\n", - "Epoch [15/100], Step [600/600], d_loss: 0.2543, g_loss: 3.3442, D(x): 0.94, D(G(z)): 0.10\n", - "Epoch [16/100], Step [200/600], d_loss: 0.5308, g_loss: 3.1811, D(x): 0.87, D(G(z)): 0.12\n", - "Epoch [16/100], Step [400/600], d_loss: 0.3078, g_loss: 3.5513, D(x): 0.92, D(G(z)): 0.13\n", - "Epoch [16/100], Step [600/600], d_loss: 0.3746, g_loss: 3.3609, D(x): 0.89, D(G(z)): 0.09\n", - "Epoch [17/100], Step [200/600], d_loss: 0.4462, g_loss: 3.0213, D(x): 0.85, D(G(z)): 0.11\n", - "Epoch [17/100], Step [400/600], d_loss: 0.3984, g_loss: 3.2388, D(x): 0.89, D(G(z)): 0.13\n", - "Epoch [17/100], Step [600/600], d_loss: 0.2951, g_loss: 3.4063, D(x): 0.93, 
D(G(z)): 0.13\n", - "Epoch [18/100], Step [200/600], d_loss: 0.3307, g_loss: 3.3891, D(x): 0.95, D(G(z)): 0.19\n", - "Epoch [18/100], Step [400/600], d_loss: 0.5400, g_loss: 3.3652, D(x): 0.87, D(G(z)): 0.15\n", - "Epoch [18/100], Step [600/600], d_loss: 0.2454, g_loss: 3.7429, D(x): 0.91, D(G(z)): 0.08\n", - "Epoch [19/100], Step [200/600], d_loss: 0.4310, g_loss: 3.7976, D(x): 0.89, D(G(z)): 0.15\n", - "Epoch [19/100], Step [400/600], d_loss: 0.5332, g_loss: 4.1109, D(x): 0.87, D(G(z)): 0.12\n", - "Epoch [19/100], Step [600/600], d_loss: 0.6261, g_loss: 2.8194, D(x): 0.85, D(G(z)): 0.09\n", - "Epoch [20/100], Step [200/600], d_loss: 0.4504, g_loss: 4.4613, D(x): 0.89, D(G(z)): 0.10\n", - "Epoch [20/100], Step [400/600], d_loss: 0.5434, g_loss: 2.6971, D(x): 0.88, D(G(z)): 0.16\n", - "Epoch [20/100], Step [600/600], d_loss: 0.5666, g_loss: 3.3807, D(x): 0.81, D(G(z)): 0.07\n", - "Epoch [21/100], Step [200/600], d_loss: 0.6655, g_loss: 2.7393, D(x): 0.81, D(G(z)): 0.08\n", - "Epoch [21/100], Step [400/600], d_loss: 0.1953, g_loss: 4.2922, D(x): 0.95, D(G(z)): 0.10\n", - "Epoch [21/100], Step [600/600], d_loss: 0.4815, g_loss: 3.8682, D(x): 0.84, D(G(z)): 0.07\n", - "Epoch [22/100], Step [200/600], d_loss: 0.6585, g_loss: 3.5895, D(x): 0.81, D(G(z)): 0.10\n", - "Epoch [22/100], Step [400/600], d_loss: 0.4922, g_loss: 3.6637, D(x): 0.84, D(G(z)): 0.08\n", - "Epoch [22/100], Step [600/600], d_loss: 0.4646, g_loss: 2.8834, D(x): 0.83, D(G(z)): 0.10\n", - "Epoch [23/100], Step [200/600], d_loss: 0.6495, g_loss: 3.8976, D(x): 0.85, D(G(z)): 0.11\n", - "Epoch [23/100], Step [400/600], d_loss: 0.5157, g_loss: 2.9286, D(x): 0.86, D(G(z)): 0.15\n", - "Epoch [23/100], Step [600/600], d_loss: 0.4582, g_loss: 3.5402, D(x): 0.86, D(G(z)): 0.10\n", - "Epoch [24/100], Step [200/600], d_loss: 0.4669, g_loss: 3.0499, D(x): 0.88, D(G(z)): 0.12\n", - "Epoch [24/100], Step [400/600], d_loss: 0.4313, g_loss: 3.2897, D(x): 0.88, D(G(z)): 0.14\n", - "Epoch [24/100], Step [600/600], 
d_loss: 0.3723, g_loss: 3.5267, D(x): 0.86, D(G(z)): 0.08\n", - "Epoch [25/100], Step [200/600], d_loss: 0.7438, g_loss: 2.9299, D(x): 0.81, D(G(z)): 0.14\n", - "Epoch [25/100], Step [400/600], d_loss: 0.9467, g_loss: 2.8801, D(x): 0.71, D(G(z)): 0.14\n", - "Epoch [25/100], Step [600/600], d_loss: 0.4304, g_loss: 2.9585, D(x): 0.85, D(G(z)): 0.12\n", - "Epoch [26/100], Step [200/600], d_loss: 0.4419, g_loss: 2.9025, D(x): 0.86, D(G(z)): 0.14\n", - "Epoch [26/100], Step [400/600], d_loss: 0.3511, g_loss: 4.0447, D(x): 0.93, D(G(z)): 0.18\n", - "Epoch [26/100], Step [600/600], d_loss: 0.8869, g_loss: 3.2814, D(x): 0.79, D(G(z)): 0.19\n", - "Epoch [27/100], Step [200/600], d_loss: 0.4022, g_loss: 2.9591, D(x): 0.87, D(G(z)): 0.09\n", - "Epoch [27/100], Step [400/600], d_loss: 0.5403, g_loss: 3.2807, D(x): 0.78, D(G(z)): 0.08\n", - "Epoch [27/100], Step [600/600], d_loss: 0.7142, g_loss: 3.1566, D(x): 0.87, D(G(z)): 0.22\n", - "Epoch [28/100], Step [200/600], d_loss: 0.5385, g_loss: 2.3096, D(x): 0.83, D(G(z)): 0.16\n", - "Epoch [28/100], Step [400/600], d_loss: 0.4990, g_loss: 2.6239, D(x): 0.86, D(G(z)): 0.17\n", - "Epoch [28/100], Step [600/600], d_loss: 0.5704, g_loss: 2.9197, D(x): 0.83, D(G(z)): 0.19\n", - "Epoch [29/100], Step [200/600], d_loss: 0.3670, g_loss: 3.6132, D(x): 0.87, D(G(z)): 0.10\n", - "Epoch [29/100], Step [400/600], d_loss: 0.5871, g_loss: 2.8933, D(x): 0.84, D(G(z)): 0.22\n", - "Epoch [29/100], Step [600/600], d_loss: 0.4667, g_loss: 3.1334, D(x): 0.81, D(G(z)): 0.08\n", - "Epoch [30/100], Step [200/600], d_loss: 0.4846, g_loss: 3.6970, D(x): 0.83, D(G(z)): 0.11\n", - "Epoch [30/100], Step [400/600], d_loss: 0.7343, g_loss: 2.2464, D(x): 0.84, D(G(z)): 0.21\n" + "Epoch [0/500], d_loss: 0.0353, g_loss: 4.5795, D(x): 0.99, D(G(z)): 0.02\n", + "Epoch [1/500], d_loss: 0.0207, g_loss: 4.8400, D(x): 0.99, D(G(z)): 0.01\n", + "Epoch [2/500], d_loss: 0.0208, g_loss: 6.7210, D(x): 0.99, D(G(z)): 0.01\n", + "Epoch [3/500], d_loss: 0.0562, g_loss: 5.7327, 
D(x): 0.99, D(G(z)): 0.04\n", + "Epoch [4/500], d_loss: 0.1537, g_loss: 7.3955, D(x): 0.96, D(G(z)): 0.01\n", + "Epoch [5/500], d_loss: 0.2890, g_loss: 5.9182, D(x): 0.93, D(G(z)): 0.04\n", + "Epoch [6/500], d_loss: 0.2335, g_loss: 5.8002, D(x): 0.93, D(G(z)): 0.03\n", + "Epoch [7/500], d_loss: 0.1111, g_loss: 5.1781, D(x): 0.99, D(G(z)): 0.07\n", + "Epoch [8/500], d_loss: 0.3939, g_loss: 6.0438, D(x): 0.89, D(G(z)): 0.01\n", + "Epoch [9/500], d_loss: 0.1982, g_loss: 4.1193, D(x): 0.96, D(G(z)): 0.09\n", + "Epoch [10/500], d_loss: 0.3053, g_loss: 5.0000, D(x): 0.93, D(G(z)): 0.09\n", + "Epoch [11/500], d_loss: 0.3597, g_loss: 3.8982, D(x): 0.87, D(G(z)): 0.05\n", + "Epoch [12/500], d_loss: 0.3082, g_loss: 4.2954, D(x): 0.93, D(G(z)): 0.10\n", + "Epoch [13/500], d_loss: 0.2371, g_loss: 5.3084, D(x): 0.92, D(G(z)): 0.03\n", + "Epoch [14/500], d_loss: 0.3004, g_loss: 4.0390, D(x): 0.94, D(G(z)): 0.11\n", + "Epoch [15/500], d_loss: 0.2624, g_loss: 4.2300, D(x): 0.92, D(G(z)): 0.07\n", + "Epoch [16/500], d_loss: 0.4186, g_loss: 3.2536, D(x): 0.88, D(G(z)): 0.09\n", + "Epoch [17/500], d_loss: 0.2966, g_loss: 3.1124, D(x): 0.95, D(G(z)): 0.17\n", + "Epoch [18/500], d_loss: 0.3810, g_loss: 3.8865, D(x): 0.91, D(G(z)): 0.13\n", + "Epoch [19/500], d_loss: 0.4771, g_loss: 4.1472, D(x): 0.85, D(G(z)): 0.11\n", + "Epoch [20/500], d_loss: 0.4038, g_loss: 2.6455, D(x): 0.92, D(G(z)): 0.18\n", + "Epoch [21/500], d_loss: 0.3549, g_loss: 3.5024, D(x): 0.91, D(G(z)): 0.12\n", + "Epoch [22/500], d_loss: 0.2232, g_loss: 3.2193, D(x): 0.91, D(G(z)): 0.06\n", + "Epoch [23/500], d_loss: 0.3045, g_loss: 3.8436, D(x): 0.90, D(G(z)): 0.13\n", + "Epoch [24/500], d_loss: 0.4334, g_loss: 3.4976, D(x): 0.84, D(G(z)): 0.09\n", + "Epoch [25/500], d_loss: 0.6626, g_loss: 2.8920, D(x): 0.87, D(G(z)): 0.24\n", + "Epoch [26/500], d_loss: 0.3871, g_loss: 3.3799, D(x): 0.88, D(G(z)): 0.12\n", + "Epoch [27/500], d_loss: 0.4851, g_loss: 3.2646, D(x): 0.85, D(G(z)): 0.11\n", + "Epoch [28/500], d_loss: 
0.4783, g_loss: 3.6343, D(x): 0.85, D(G(z)): 0.11\n", + "Epoch [29/500], d_loss: 0.4668, g_loss: 2.3135, D(x): 0.87, D(G(z)): 0.21\n", + "Epoch [30/500], d_loss: 0.5571, g_loss: 2.8470, D(x): 0.88, D(G(z)): 0.16\n", + "Epoch [31/500], d_loss: 0.4593, g_loss: 3.2937, D(x): 0.84, D(G(z)): 0.13\n", + "Epoch [32/500], d_loss: 0.5466, g_loss: 3.1846, D(x): 0.81, D(G(z)): 0.13\n", + "Epoch [33/500], d_loss: 0.4839, g_loss: 2.5507, D(x): 0.86, D(G(z)): 0.15\n", + "Epoch [34/500], d_loss: 0.4760, g_loss: 2.6080, D(x): 0.90, D(G(z)): 0.21\n", + "Epoch [35/500], d_loss: 0.5840, g_loss: 3.2098, D(x): 0.82, D(G(z)): 0.16\n", + "Epoch [36/500], d_loss: 0.7232, g_loss: 2.5683, D(x): 0.77, D(G(z)): 0.12\n", + "Epoch [37/500], d_loss: 0.4694, g_loss: 3.0153, D(x): 0.89, D(G(z)): 0.14\n", + "Epoch [38/500], d_loss: 0.5294, g_loss: 2.4553, D(x): 0.82, D(G(z)): 0.15\n", + "Epoch [39/500], d_loss: 0.4357, g_loss: 3.2864, D(x): 0.86, D(G(z)): 0.14\n", + "Epoch [40/500], d_loss: 0.5010, g_loss: 2.9378, D(x): 0.84, D(G(z)): 0.12\n", + "Epoch [41/500], d_loss: 0.6251, g_loss: 2.8454, D(x): 0.79, D(G(z)): 0.12\n", + "Epoch [42/500], d_loss: 0.7347, g_loss: 3.2269, D(x): 0.73, D(G(z)): 0.08\n", + "Epoch [43/500], d_loss: 0.5337, g_loss: 2.6454, D(x): 0.86, D(G(z)): 0.22\n", + "Epoch [44/500], d_loss: 0.5710, g_loss: 2.9795, D(x): 0.78, D(G(z)): 0.13\n", + "Epoch [45/500], d_loss: 0.5998, g_loss: 2.4841, D(x): 0.85, D(G(z)): 0.19\n", + "Epoch [46/500], d_loss: 0.7843, g_loss: 2.1948, D(x): 0.78, D(G(z)): 0.17\n", + "Epoch [47/500], d_loss: 0.6242, g_loss: 2.7627, D(x): 0.85, D(G(z)): 0.25\n", + "Epoch [48/500], d_loss: 0.6231, g_loss: 2.6626, D(x): 0.79, D(G(z)): 0.13\n", + "Epoch [49/500], d_loss: 0.7630, g_loss: 3.5203, D(x): 0.80, D(G(z)): 0.17\n", + "Epoch [50/500], d_loss: 0.5719, g_loss: 2.4131, D(x): 0.81, D(G(z)): 0.16\n", + "Epoch [51/500], d_loss: 0.8938, g_loss: 1.7677, D(x): 0.80, D(G(z)): 0.30\n", + "Epoch [52/500], d_loss: 0.8256, g_loss: 2.4449, D(x): 0.79, D(G(z)): 0.24\n", + 
"Epoch [53/500], d_loss: 0.5849, g_loss: 2.4096, D(x): 0.86, D(G(z)): 0.23\n", + "Epoch [54/500], d_loss: 0.7261, g_loss: 2.5420, D(x): 0.81, D(G(z)): 0.22\n", + "Epoch [55/500], d_loss: 0.5505, g_loss: 2.3990, D(x): 0.87, D(G(z)): 0.25\n", + "Epoch [56/500], d_loss: 0.8936, g_loss: 3.2751, D(x): 0.75, D(G(z)): 0.24\n", + "Epoch [57/500], d_loss: 0.5805, g_loss: 2.1544, D(x): 0.84, D(G(z)): 0.25\n", + "Epoch [58/500], d_loss: 0.6203, g_loss: 2.3525, D(x): 0.81, D(G(z)): 0.18\n", + "Epoch [59/500], d_loss: 0.6470, g_loss: 2.3724, D(x): 0.78, D(G(z)): 0.19\n", + "Epoch [60/500], d_loss: 0.5552, g_loss: 2.7438, D(x): 0.77, D(G(z)): 0.12\n", + "Epoch [61/500], d_loss: 0.9309, g_loss: 2.5658, D(x): 0.71, D(G(z)): 0.18\n", + "Epoch [62/500], d_loss: 0.5131, g_loss: 2.5370, D(x): 0.86, D(G(z)): 0.20\n", + "Epoch [63/500], d_loss: 0.6984, g_loss: 2.8204, D(x): 0.78, D(G(z)): 0.14\n", + "Epoch [64/500], d_loss: 0.6754, g_loss: 1.7643, D(x): 0.79, D(G(z)): 0.21\n", + "Epoch [65/500], d_loss: 0.5948, g_loss: 2.3132, D(x): 0.85, D(G(z)): 0.24\n", + "Epoch [66/500], d_loss: 0.9070, g_loss: 2.1512, D(x): 0.76, D(G(z)): 0.28\n", + "Epoch [67/500], d_loss: 0.5339, g_loss: 2.6446, D(x): 0.80, D(G(z)): 0.13\n", + "Epoch [68/500], d_loss: 0.7459, g_loss: 2.1979, D(x): 0.80, D(G(z)): 0.25\n", + "Epoch [69/500], d_loss: 0.6126, g_loss: 2.4961, D(x): 0.80, D(G(z)): 0.18\n", + "Epoch [70/500], d_loss: 0.5729, g_loss: 2.8205, D(x): 0.74, D(G(z)): 0.13\n", + "Epoch [71/500], d_loss: 0.7766, g_loss: 2.3885, D(x): 0.72, D(G(z)): 0.16\n", + "Epoch [72/500], d_loss: 0.7822, g_loss: 2.6657, D(x): 0.77, D(G(z)): 0.20\n", + "Epoch [73/500], d_loss: 0.8265, g_loss: 2.6101, D(x): 0.72, D(G(z)): 0.19\n", + "Epoch [74/500], d_loss: 0.6122, g_loss: 2.6391, D(x): 0.84, D(G(z)): 0.23\n", + "Epoch [75/500], d_loss: 0.7539, g_loss: 2.0521, D(x): 0.86, D(G(z)): 0.34\n", + "Epoch [76/500], d_loss: 0.7667, g_loss: 1.8303, D(x): 0.80, D(G(z)): 0.30\n", + "Epoch [77/500], d_loss: 0.6584, g_loss: 2.1489, D(x): 
0.88, D(G(z)): 0.30\n", + "Epoch [78/500], d_loss: 0.6255, g_loss: 2.6509, D(x): 0.82, D(G(z)): 0.19\n", + "Epoch [79/500], d_loss: 0.7525, g_loss: 2.3574, D(x): 0.73, D(G(z)): 0.17\n", + "Epoch [80/500], d_loss: 0.7160, g_loss: 1.7728, D(x): 0.81, D(G(z)): 0.29\n", + "Epoch [81/500], d_loss: 0.5326, g_loss: 2.7399, D(x): 0.85, D(G(z)): 0.21\n", + "Epoch [82/500], d_loss: 0.9103, g_loss: 1.6615, D(x): 0.81, D(G(z)): 0.36\n", + "Epoch [83/500], d_loss: 0.8975, g_loss: 2.0681, D(x): 0.72, D(G(z)): 0.21\n", + "Epoch [84/500], d_loss: 0.6805, g_loss: 1.9258, D(x): 0.76, D(G(z)): 0.21\n", + "Epoch [85/500], d_loss: 0.8092, g_loss: 1.6428, D(x): 0.86, D(G(z)): 0.35\n", + "Epoch [86/500], d_loss: 0.5977, g_loss: 2.6141, D(x): 0.82, D(G(z)): 0.18\n", + "Epoch [87/500], d_loss: 0.6894, g_loss: 2.4755, D(x): 0.73, D(G(z)): 0.14\n", + "Epoch [88/500], d_loss: 0.6143, g_loss: 1.7354, D(x): 0.80, D(G(z)): 0.23\n", + "Epoch [89/500], d_loss: 0.7044, g_loss: 2.0786, D(x): 0.80, D(G(z)): 0.26\n", + "Epoch [90/500], d_loss: 0.6391, g_loss: 2.3855, D(x): 0.75, D(G(z)): 0.13\n", + "Epoch [91/500], d_loss: 0.6587, g_loss: 2.2705, D(x): 0.72, D(G(z)): 0.14\n", + "Epoch [92/500], d_loss: 0.8436, g_loss: 2.1588, D(x): 0.77, D(G(z)): 0.26\n", + "Epoch [93/500], d_loss: 0.5117, g_loss: 2.3292, D(x): 0.84, D(G(z)): 0.20\n", + "Epoch [94/500], d_loss: 0.6810, g_loss: 2.4156, D(x): 0.73, D(G(z)): 0.16\n", + "Epoch [95/500], d_loss: 0.4709, g_loss: 2.8735, D(x): 0.84, D(G(z)): 0.15\n", + "Epoch [96/500], d_loss: 0.6960, g_loss: 2.2731, D(x): 0.75, D(G(z)): 0.20\n", + "Epoch [97/500], d_loss: 0.7565, g_loss: 1.7992, D(x): 0.76, D(G(z)): 0.24\n", + "Epoch [98/500], d_loss: 0.8444, g_loss: 2.2126, D(x): 0.68, D(G(z)): 0.20\n", + "Epoch [99/500], d_loss: 0.7740, g_loss: 1.7081, D(x): 0.70, D(G(z)): 0.20\n", + "Epoch [100/500], d_loss: 0.7537, g_loss: 2.4329, D(x): 0.75, D(G(z)): 0.20\n", + "Epoch [101/500], d_loss: 0.7988, g_loss: 1.7567, D(x): 0.76, D(G(z)): 0.27\n", + "Epoch [102/500], d_loss: 
0.9476, g_loss: 2.2052, D(x): 0.71, D(G(z)): 0.23\n", + "Epoch [103/500], d_loss: 1.0190, g_loss: 1.7593, D(x): 0.70, D(G(z)): 0.24\n", + "Epoch [104/500], d_loss: 1.0124, g_loss: 1.5721, D(x): 0.77, D(G(z)): 0.37\n", + "Epoch [105/500], d_loss: 0.6753, g_loss: 1.8733, D(x): 0.76, D(G(z)): 0.23\n", + "Epoch [106/500], d_loss: 0.7078, g_loss: 1.9609, D(x): 0.78, D(G(z)): 0.25\n", + "Epoch [107/500], d_loss: 0.7360, g_loss: 2.4600, D(x): 0.75, D(G(z)): 0.20\n", + "Epoch [108/500], d_loss: 0.8563, g_loss: 1.4943, D(x): 0.74, D(G(z)): 0.30\n", + "Epoch [109/500], d_loss: 0.6901, g_loss: 2.3865, D(x): 0.76, D(G(z)): 0.21\n", + "Epoch [110/500], d_loss: 1.0278, g_loss: 1.9630, D(x): 0.69, D(G(z)): 0.26\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [30/100], Step [600/600], d_loss: 0.5736, g_loss: 2.8101, D(x): 0.80, D(G(z)): 0.10\n", - "Epoch [31/100], Step [200/600], d_loss: 0.5028, g_loss: 3.7064, D(x): 0.81, D(G(z)): 0.07\n", - "Epoch [31/100], Step [400/600], d_loss: 0.6508, g_loss: 2.7957, D(x): 0.79, D(G(z)): 0.18\n", - "Epoch [31/100], Step [600/600], d_loss: 0.5380, g_loss: 2.9571, D(x): 0.84, D(G(z)): 0.16\n", - "Epoch [32/100], Step [200/600], d_loss: 0.4424, g_loss: 2.4877, D(x): 0.87, D(G(z)): 0.17\n", - "Epoch [32/100], Step [400/600], d_loss: 0.4118, g_loss: 2.5834, D(x): 0.90, D(G(z)): 0.17\n", - "Epoch [32/100], Step [600/600], d_loss: 0.7510, g_loss: 2.4698, D(x): 0.73, D(G(z)): 0.12\n", - "Epoch [33/100], Step [200/600], d_loss: 0.6722, g_loss: 2.7541, D(x): 0.80, D(G(z)): 0.15\n", - "Epoch [33/100], Step [400/600], d_loss: 0.4703, g_loss: 3.2515, D(x): 0.84, D(G(z)): 0.13\n", - "Epoch [33/100], Step [600/600], d_loss: 0.8260, g_loss: 2.3634, D(x): 0.74, D(G(z)): 0.19\n", - "Epoch [34/100], Step [200/600], d_loss: 0.5580, g_loss: 2.2795, D(x): 0.83, D(G(z)): 0.17\n", - "Epoch [34/100], Step [400/600], d_loss: 0.3844, g_loss: 3.7353, D(x): 0.91, D(G(z)): 0.16\n", - "Epoch [34/100], Step [600/600], d_loss: 0.4763, g_loss: 2.9245, 
D(x): 0.80, D(G(z)): 0.09\n", - "Epoch [35/100], Step [200/600], d_loss: 0.7082, g_loss: 3.0290, D(x): 0.75, D(G(z)): 0.14\n", - "Epoch [35/100], Step [400/600], d_loss: 0.6366, g_loss: 2.4441, D(x): 0.75, D(G(z)): 0.13\n", - "Epoch [35/100], Step [600/600], d_loss: 0.3602, g_loss: 2.5511, D(x): 0.92, D(G(z)): 0.19\n", - "Epoch [36/100], Step [200/600], d_loss: 0.6494, g_loss: 2.6098, D(x): 0.75, D(G(z)): 0.14\n", - "Epoch [36/100], Step [400/600], d_loss: 0.7318, g_loss: 2.2873, D(x): 0.84, D(G(z)): 0.24\n", - "Epoch [36/100], Step [600/600], d_loss: 0.4189, g_loss: 2.6667, D(x): 0.88, D(G(z)): 0.13\n", - "Epoch [37/100], Step [200/600], d_loss: 0.5482, g_loss: 3.0558, D(x): 0.81, D(G(z)): 0.16\n", - "Epoch [37/100], Step [400/600], d_loss: 0.6997, g_loss: 2.7520, D(x): 0.79, D(G(z)): 0.14\n", - "Epoch [37/100], Step [600/600], d_loss: 0.2648, g_loss: 3.6700, D(x): 0.92, D(G(z)): 0.13\n", - "Epoch [38/100], Step [200/600], d_loss: 0.3936, g_loss: 1.9997, D(x): 0.87, D(G(z)): 0.16\n", - "Epoch [38/100], Step [400/600], d_loss: 0.4968, g_loss: 2.8145, D(x): 0.82, D(G(z)): 0.11\n", - "Epoch [38/100], Step [600/600], d_loss: 0.4657, g_loss: 2.7654, D(x): 0.83, D(G(z)): 0.13\n", - "Epoch [39/100], Step [200/600], d_loss: 0.4711, g_loss: 2.8458, D(x): 0.90, D(G(z)): 0.21\n", - "Epoch [39/100], Step [400/600], d_loss: 0.4008, g_loss: 3.7521, D(x): 0.85, D(G(z)): 0.10\n", - "Epoch [39/100], Step [600/600], d_loss: 0.4650, g_loss: 2.8507, D(x): 0.91, D(G(z)): 0.22\n", - "Epoch [40/100], Step [200/600], d_loss: 0.3269, g_loss: 2.9937, D(x): 0.91, D(G(z)): 0.15\n", - "Epoch [40/100], Step [400/600], d_loss: 0.5217, g_loss: 3.0867, D(x): 0.80, D(G(z)): 0.13\n", - "Epoch [40/100], Step [600/600], d_loss: 0.5557, g_loss: 3.2619, D(x): 0.79, D(G(z)): 0.10\n", - "Epoch [41/100], Step [200/600], d_loss: 0.4923, g_loss: 2.7842, D(x): 0.81, D(G(z)): 0.11\n", - "Epoch [41/100], Step [400/600], d_loss: 0.4994, g_loss: 3.3219, D(x): 0.83, D(G(z)): 0.13\n", - "Epoch [41/100], Step 
[600/600], d_loss: 0.4599, g_loss: 2.8885, D(x): 0.90, D(G(z)): 0.23\n", - "Epoch [42/100], Step [200/600], d_loss: 0.6313, g_loss: 3.1359, D(x): 0.87, D(G(z)): 0.21\n", - "Epoch [42/100], Step [400/600], d_loss: 0.5508, g_loss: 2.9586, D(x): 0.82, D(G(z)): 0.11\n", - "Epoch [42/100], Step [600/600], d_loss: 0.6769, g_loss: 2.4840, D(x): 0.75, D(G(z)): 0.10\n", - "Epoch [43/100], Step [200/600], d_loss: 0.4893, g_loss: 2.3898, D(x): 0.86, D(G(z)): 0.18\n", - "Epoch [43/100], Step [400/600], d_loss: 0.4797, g_loss: 2.3459, D(x): 0.82, D(G(z)): 0.13\n", - "Epoch [43/100], Step [600/600], d_loss: 0.4791, g_loss: 1.9886, D(x): 0.86, D(G(z)): 0.19\n", - "Epoch [44/100], Step [200/600], d_loss: 0.3527, g_loss: 3.6642, D(x): 0.88, D(G(z)): 0.11\n", - "Epoch [44/100], Step [400/600], d_loss: 0.3834, g_loss: 2.7381, D(x): 0.91, D(G(z)): 0.18\n", - "Epoch [44/100], Step [600/600], d_loss: 0.6438, g_loss: 2.4093, D(x): 0.88, D(G(z)): 0.27\n", - "Epoch [45/100], Step [200/600], d_loss: 0.5501, g_loss: 2.8851, D(x): 0.90, D(G(z)): 0.25\n", - "Epoch [45/100], Step [400/600], d_loss: 0.5142, g_loss: 2.8745, D(x): 0.84, D(G(z)): 0.15\n", - "Epoch [45/100], Step [600/600], d_loss: 0.4721, g_loss: 2.1990, D(x): 0.88, D(G(z)): 0.19\n", - "Epoch [46/100], Step [200/600], d_loss: 0.5641, g_loss: 2.8566, D(x): 0.83, D(G(z)): 0.17\n", - "Epoch [46/100], Step [400/600], d_loss: 0.6218, g_loss: 3.3245, D(x): 0.83, D(G(z)): 0.24\n", - "Epoch [46/100], Step [600/600], d_loss: 0.6173, g_loss: 2.4566, D(x): 0.83, D(G(z)): 0.21\n", - "Epoch [47/100], Step [200/600], d_loss: 0.5530, g_loss: 2.5028, D(x): 0.87, D(G(z)): 0.23\n", - "Epoch [47/100], Step [400/600], d_loss: 0.8854, g_loss: 2.5633, D(x): 0.81, D(G(z)): 0.29\n", - "Epoch [47/100], Step [600/600], d_loss: 0.5736, g_loss: 3.0705, D(x): 0.85, D(G(z)): 0.17\n", - "Epoch [48/100], Step [200/600], d_loss: 0.5449, g_loss: 2.8489, D(x): 0.83, D(G(z)): 0.18\n", - "Epoch [48/100], Step [400/600], d_loss: 0.6590, g_loss: 2.8622, D(x): 0.77, 
D(G(z)): 0.17\n", - "Epoch [48/100], Step [600/600], d_loss: 0.7175, g_loss: 2.1557, D(x): 0.77, D(G(z)): 0.19\n", - "Epoch [49/100], Step [200/600], d_loss: 0.5540, g_loss: 2.1977, D(x): 0.86, D(G(z)): 0.21\n", - "Epoch [49/100], Step [400/600], d_loss: 0.6489, g_loss: 2.4967, D(x): 0.84, D(G(z)): 0.25\n", - "Epoch [49/100], Step [600/600], d_loss: 0.5870, g_loss: 2.5856, D(x): 0.82, D(G(z)): 0.20\n", - "Epoch [50/100], Step [200/600], d_loss: 0.5164, g_loss: 2.3382, D(x): 0.87, D(G(z)): 0.21\n", - "Epoch [50/100], Step [400/600], d_loss: 0.8033, g_loss: 2.3014, D(x): 0.72, D(G(z)): 0.17\n", - "Epoch [50/100], Step [600/600], d_loss: 0.8258, g_loss: 2.7172, D(x): 0.76, D(G(z)): 0.18\n", - "Epoch [51/100], Step [200/600], d_loss: 0.7045, g_loss: 1.7460, D(x): 0.79, D(G(z)): 0.23\n", - "Epoch [51/100], Step [400/600], d_loss: 0.6292, g_loss: 2.7456, D(x): 0.83, D(G(z)): 0.17\n", - "Epoch [51/100], Step [600/600], d_loss: 0.8915, g_loss: 3.0193, D(x): 0.69, D(G(z)): 0.12\n", - "Epoch [52/100], Step [200/600], d_loss: 0.6594, g_loss: 2.7342, D(x): 0.91, D(G(z)): 0.30\n", - "Epoch [52/100], Step [400/600], d_loss: 0.5980, g_loss: 2.8947, D(x): 0.79, D(G(z)): 0.14\n", - "Epoch [52/100], Step [600/600], d_loss: 0.8133, g_loss: 2.4020, D(x): 0.77, D(G(z)): 0.23\n", - "Epoch [53/100], Step [200/600], d_loss: 0.5583, g_loss: 2.6996, D(x): 0.82, D(G(z)): 0.14\n", - "Epoch [53/100], Step [400/600], d_loss: 0.8307, g_loss: 2.1513, D(x): 0.77, D(G(z)): 0.27\n", - "Epoch [53/100], Step [600/600], d_loss: 0.6789, g_loss: 2.0341, D(x): 0.81, D(G(z)): 0.22\n", - "Epoch [54/100], Step [200/600], d_loss: 0.7146, g_loss: 2.2403, D(x): 0.73, D(G(z)): 0.14\n", - "Epoch [54/100], Step [400/600], d_loss: 0.5241, g_loss: 2.3787, D(x): 0.80, D(G(z)): 0.15\n", - "Epoch [54/100], Step [600/600], d_loss: 0.6304, g_loss: 1.5708, D(x): 0.83, D(G(z)): 0.26\n", - "Epoch [55/100], Step [200/600], d_loss: 0.7945, g_loss: 2.7748, D(x): 0.70, D(G(z)): 0.15\n", - "Epoch [55/100], Step [400/600], 
d_loss: 0.7019, g_loss: 1.7262, D(x): 0.76, D(G(z)): 0.19\n", - "Epoch [55/100], Step [600/600], d_loss: 0.6129, g_loss: 2.6081, D(x): 0.82, D(G(z)): 0.24\n", - "Epoch [56/100], Step [200/600], d_loss: 0.8178, g_loss: 2.5105, D(x): 0.75, D(G(z)): 0.21\n", - "Epoch [56/100], Step [400/600], d_loss: 0.6320, g_loss: 2.2701, D(x): 0.77, D(G(z)): 0.15\n", - "Epoch [56/100], Step [600/600], d_loss: 0.4877, g_loss: 2.7922, D(x): 0.86, D(G(z)): 0.21\n", - "Epoch [57/100], Step [200/600], d_loss: 0.6353, g_loss: 2.8568, D(x): 0.76, D(G(z)): 0.12\n", - "Epoch [57/100], Step [400/600], d_loss: 0.6107, g_loss: 2.5361, D(x): 0.82, D(G(z)): 0.23\n", - "Epoch [57/100], Step [600/600], d_loss: 0.7215, g_loss: 2.3024, D(x): 0.75, D(G(z)): 0.19\n", - "Epoch [58/100], Step [200/600], d_loss: 0.7068, g_loss: 2.1828, D(x): 0.80, D(G(z)): 0.25\n", - "Epoch [58/100], Step [400/600], d_loss: 0.8305, g_loss: 2.0106, D(x): 0.77, D(G(z)): 0.29\n", - "Epoch [58/100], Step [600/600], d_loss: 0.6946, g_loss: 2.8450, D(x): 0.70, D(G(z)): 0.11\n", - "Epoch [59/100], Step [200/600], d_loss: 0.4576, g_loss: 2.5408, D(x): 0.85, D(G(z)): 0.16\n", - "Epoch [59/100], Step [400/600], d_loss: 0.6791, g_loss: 2.1861, D(x): 0.76, D(G(z)): 0.16\n", - "Epoch [59/100], Step [600/600], d_loss: 0.4857, g_loss: 2.1192, D(x): 0.84, D(G(z)): 0.19\n", - "Epoch [60/100], Step [200/600], d_loss: 0.8066, g_loss: 2.0011, D(x): 0.75, D(G(z)): 0.21\n", - "Epoch [60/100], Step [400/600], d_loss: 0.4817, g_loss: 2.6365, D(x): 0.83, D(G(z)): 0.16\n", - "Epoch [60/100], Step [600/600], d_loss: 1.1641, g_loss: 2.6216, D(x): 0.66, D(G(z)): 0.22\n", - "Epoch [61/100], Step [200/600], d_loss: 0.7387, g_loss: 2.3692, D(x): 0.76, D(G(z)): 0.23\n" + "Epoch [111/500], d_loss: 0.7124, g_loss: 2.2414, D(x): 0.75, D(G(z)): 0.22\n", + "Epoch [112/500], d_loss: 0.9356, g_loss: 1.7013, D(x): 0.69, D(G(z)): 0.24\n", + "Epoch [113/500], d_loss: 0.8027, g_loss: 1.7171, D(x): 0.72, D(G(z)): 0.27\n", + "Epoch [114/500], d_loss: 0.8141, g_loss: 
1.6392, D(x): 0.74, D(G(z)): 0.28\n", + "Epoch [115/500], d_loss: 0.8254, g_loss: 1.6652, D(x): 0.71, D(G(z)): 0.22\n", + "Epoch [116/500], d_loss: 0.8622, g_loss: 1.6949, D(x): 0.74, D(G(z)): 0.32\n", + "Epoch [117/500], d_loss: 0.9677, g_loss: 2.0703, D(x): 0.69, D(G(z)): 0.28\n", + "Epoch [118/500], d_loss: 1.1266, g_loss: 1.9132, D(x): 0.61, D(G(z)): 0.17\n", + "Epoch [119/500], d_loss: 0.9547, g_loss: 1.2488, D(x): 0.83, D(G(z)): 0.43\n", + "Epoch [120/500], d_loss: 0.8658, g_loss: 1.3977, D(x): 0.79, D(G(z)): 0.35\n", + "Epoch [121/500], d_loss: 0.8857, g_loss: 1.5309, D(x): 0.71, D(G(z)): 0.29\n", + "Epoch [122/500], d_loss: 0.6393, g_loss: 1.9779, D(x): 0.84, D(G(z)): 0.28\n", + "Epoch [123/500], d_loss: 0.7713, g_loss: 1.7291, D(x): 0.70, D(G(z)): 0.20\n", + "Epoch [124/500], d_loss: 1.0790, g_loss: 1.6057, D(x): 0.67, D(G(z)): 0.33\n", + "Epoch [125/500], d_loss: 0.7385, g_loss: 2.2097, D(x): 0.73, D(G(z)): 0.18\n", + "Epoch [126/500], d_loss: 0.8954, g_loss: 1.4857, D(x): 0.70, D(G(z)): 0.30\n", + "Epoch [127/500], d_loss: 0.9720, g_loss: 1.6348, D(x): 0.67, D(G(z)): 0.29\n", + "Epoch [128/500], d_loss: 0.7679, g_loss: 1.9236, D(x): 0.77, D(G(z)): 0.29\n", + "Epoch [129/500], d_loss: 1.0456, g_loss: 1.7043, D(x): 0.67, D(G(z)): 0.30\n", + "Epoch [130/500], d_loss: 0.9382, g_loss: 1.4580, D(x): 0.74, D(G(z)): 0.36\n", + "Epoch [131/500], d_loss: 0.8874, g_loss: 1.4525, D(x): 0.79, D(G(z)): 0.36\n", + "Epoch [132/500], d_loss: 0.7852, g_loss: 1.5734, D(x): 0.82, D(G(z)): 0.34\n", + "Epoch [133/500], d_loss: 0.8821, g_loss: 1.2828, D(x): 0.78, D(G(z)): 0.37\n", + "Epoch [134/500], d_loss: 0.7562, g_loss: 2.1624, D(x): 0.75, D(G(z)): 0.21\n", + "Epoch [135/500], d_loss: 0.8192, g_loss: 1.9284, D(x): 0.72, D(G(z)): 0.24\n", + "Epoch [136/500], d_loss: 1.0811, g_loss: 1.4803, D(x): 0.65, D(G(z)): 0.29\n", + "Epoch [137/500], d_loss: 0.8183, g_loss: 1.8425, D(x): 0.74, D(G(z)): 0.26\n", + "Epoch [138/500], d_loss: 0.8843, g_loss: 1.6975, D(x): 0.69, D(G(z)): 
0.27\n", + "Epoch [139/500], d_loss: 0.9132, g_loss: 2.0930, D(x): 0.65, D(G(z)): 0.23\n", + "Epoch [140/500], d_loss: 0.9628, g_loss: 1.5999, D(x): 0.69, D(G(z)): 0.29\n", + "Epoch [141/500], d_loss: 0.8715, g_loss: 1.7837, D(x): 0.72, D(G(z)): 0.28\n", + "Epoch [142/500], d_loss: 0.9856, g_loss: 1.7957, D(x): 0.65, D(G(z)): 0.25\n", + "Epoch [143/500], d_loss: 0.7947, g_loss: 1.6507, D(x): 0.78, D(G(z)): 0.31\n", + "Epoch [144/500], d_loss: 0.8649, g_loss: 1.5480, D(x): 0.71, D(G(z)): 0.30\n", + "Epoch [145/500], d_loss: 1.0550, g_loss: 1.6807, D(x): 0.69, D(G(z)): 0.34\n", + "Epoch [146/500], d_loss: 0.9706, g_loss: 1.2383, D(x): 0.75, D(G(z)): 0.39\n", + "Epoch [147/500], d_loss: 0.8547, g_loss: 1.5350, D(x): 0.73, D(G(z)): 0.28\n", + "Epoch [148/500], d_loss: 0.9474, g_loss: 1.8240, D(x): 0.64, D(G(z)): 0.23\n", + "Epoch [149/500], d_loss: 0.7990, g_loss: 2.1228, D(x): 0.78, D(G(z)): 0.28\n", + "Epoch [150/500], d_loss: 0.9172, g_loss: 1.8863, D(x): 0.64, D(G(z)): 0.21\n", + "Epoch [151/500], d_loss: 0.8224, g_loss: 1.5267, D(x): 0.71, D(G(z)): 0.26\n", + "Epoch [152/500], d_loss: 1.0207, g_loss: 1.3409, D(x): 0.70, D(G(z)): 0.37\n", + "Epoch [153/500], d_loss: 0.7453, g_loss: 1.9378, D(x): 0.70, D(G(z)): 0.21\n", + "Epoch [154/500], d_loss: 0.9793, g_loss: 1.8074, D(x): 0.70, D(G(z)): 0.32\n", + "Epoch [155/500], d_loss: 0.7143, g_loss: 1.8886, D(x): 0.75, D(G(z)): 0.25\n", + "Epoch [156/500], d_loss: 1.0511, g_loss: 1.3900, D(x): 0.65, D(G(z)): 0.33\n", + "Epoch [157/500], d_loss: 0.9450, g_loss: 1.5915, D(x): 0.65, D(G(z)): 0.28\n", + "Epoch [158/500], d_loss: 1.0519, g_loss: 1.5531, D(x): 0.64, D(G(z)): 0.30\n", + "Epoch [159/500], d_loss: 1.0894, g_loss: 1.5769, D(x): 0.65, D(G(z)): 0.31\n", + "Epoch [160/500], d_loss: 0.9136, g_loss: 1.3484, D(x): 0.72, D(G(z)): 0.34\n", + "Epoch [161/500], d_loss: 0.9079, g_loss: 1.5181, D(x): 0.70, D(G(z)): 0.31\n", + "Epoch [162/500], d_loss: 0.9189, g_loss: 1.5447, D(x): 0.69, D(G(z)): 0.30\n", + "Epoch [163/500], 
d_loss: 0.8701, g_loss: 1.7429, D(x): 0.67, D(G(z)): 0.25\n", + "Epoch [164/500], d_loss: 1.1158, g_loss: 1.1049, D(x): 0.69, D(G(z)): 0.40\n", + "Epoch [165/500], d_loss: 1.0878, g_loss: 1.1434, D(x): 0.71, D(G(z)): 0.41\n", + "Epoch [166/500], d_loss: 1.0542, g_loss: 1.5165, D(x): 0.63, D(G(z)): 0.30\n", + "Epoch [167/500], d_loss: 0.9092, g_loss: 1.6487, D(x): 0.72, D(G(z)): 0.29\n", + "Epoch [168/500], d_loss: 0.8574, g_loss: 1.6596, D(x): 0.74, D(G(z)): 0.31\n", + "Epoch [169/500], d_loss: 0.8281, g_loss: 1.4313, D(x): 0.75, D(G(z)): 0.32\n", + "Epoch [170/500], d_loss: 1.2006, g_loss: 1.5103, D(x): 0.63, D(G(z)): 0.34\n", + "Epoch [171/500], d_loss: 1.0200, g_loss: 1.3363, D(x): 0.75, D(G(z)): 0.39\n", + "Epoch [172/500], d_loss: 0.9650, g_loss: 1.6249, D(x): 0.69, D(G(z)): 0.32\n", + "Epoch [173/500], d_loss: 1.1926, g_loss: 1.3233, D(x): 0.67, D(G(z)): 0.40\n", + "Epoch [174/500], d_loss: 0.9956, g_loss: 1.4485, D(x): 0.65, D(G(z)): 0.32\n", + "Epoch [175/500], d_loss: 0.8553, g_loss: 1.6937, D(x): 0.68, D(G(z)): 0.26\n", + "Epoch [176/500], d_loss: 1.0447, g_loss: 1.4221, D(x): 0.70, D(G(z)): 0.36\n", + "Epoch [177/500], d_loss: 1.1053, g_loss: 1.2209, D(x): 0.62, D(G(z)): 0.33\n", + "Epoch [178/500], d_loss: 0.8936, g_loss: 1.4434, D(x): 0.71, D(G(z)): 0.30\n", + "Epoch [179/500], d_loss: 0.8940, g_loss: 1.9409, D(x): 0.68, D(G(z)): 0.26\n", + "Epoch [180/500], d_loss: 1.0845, g_loss: 1.3372, D(x): 0.66, D(G(z)): 0.35\n", + "Epoch [181/500], d_loss: 0.8868, g_loss: 1.7083, D(x): 0.67, D(G(z)): 0.26\n", + "Epoch [182/500], d_loss: 0.8771, g_loss: 1.4841, D(x): 0.75, D(G(z)): 0.36\n", + "Epoch [183/500], d_loss: 0.8713, g_loss: 1.4353, D(x): 0.72, D(G(z)): 0.32\n", + "Epoch [184/500], d_loss: 1.0071, g_loss: 1.4673, D(x): 0.67, D(G(z)): 0.32\n", + "Epoch [185/500], d_loss: 0.8797, g_loss: 1.6886, D(x): 0.72, D(G(z)): 0.30\n", + "Epoch [186/500], d_loss: 0.6842, g_loss: 1.9004, D(x): 0.74, D(G(z)): 0.23\n", + "Epoch [187/500], d_loss: 0.9018, g_loss: 1.8251, 
D(x): 0.65, D(G(z)): 0.24\n", + "Epoch [188/500], d_loss: 1.0214, g_loss: 1.4530, D(x): 0.64, D(G(z)): 0.33\n", + "Epoch [189/500], d_loss: 0.8291, g_loss: 1.5005, D(x): 0.72, D(G(z)): 0.31\n", + "Epoch [190/500], d_loss: 0.9407, g_loss: 1.4170, D(x): 0.69, D(G(z)): 0.34\n", + "Epoch [191/500], d_loss: 1.0630, g_loss: 1.4492, D(x): 0.66, D(G(z)): 0.33\n", + "Epoch [192/500], d_loss: 0.9868, g_loss: 1.2613, D(x): 0.68, D(G(z)): 0.35\n", + "Epoch [193/500], d_loss: 0.8190, g_loss: 1.6483, D(x): 0.69, D(G(z)): 0.27\n", + "Epoch [194/500], d_loss: 0.9843, g_loss: 1.6070, D(x): 0.61, D(G(z)): 0.24\n", + "Epoch [195/500], d_loss: 0.8839, g_loss: 1.4878, D(x): 0.70, D(G(z)): 0.32\n", + "Epoch [196/500], d_loss: 1.1545, g_loss: 1.3165, D(x): 0.66, D(G(z)): 0.40\n", + "Epoch [197/500], d_loss: 1.0539, g_loss: 1.3457, D(x): 0.66, D(G(z)): 0.32\n", + "Epoch [198/500], d_loss: 1.1856, g_loss: 1.4515, D(x): 0.63, D(G(z)): 0.37\n", + "Epoch [199/500], d_loss: 1.0534, g_loss: 1.4062, D(x): 0.71, D(G(z)): 0.39\n", + "Epoch [200/500], d_loss: 1.1110, g_loss: 1.2928, D(x): 0.65, D(G(z)): 0.35\n", + "Epoch [201/500], d_loss: 0.8340, g_loss: 1.6042, D(x): 0.72, D(G(z)): 0.31\n", + "Epoch [202/500], d_loss: 1.0632, g_loss: 1.2951, D(x): 0.72, D(G(z)): 0.40\n", + "Epoch [203/500], d_loss: 1.0884, g_loss: 1.3210, D(x): 0.63, D(G(z)): 0.31\n", + "Epoch [204/500], d_loss: 1.0211, g_loss: 1.6223, D(x): 0.62, D(G(z)): 0.29\n", + "Epoch [205/500], d_loss: 0.8629, g_loss: 1.3774, D(x): 0.70, D(G(z)): 0.32\n", + "Epoch [206/500], d_loss: 1.0650, g_loss: 1.2776, D(x): 0.69, D(G(z)): 0.37\n", + "Epoch [207/500], d_loss: 0.8520, g_loss: 1.6925, D(x): 0.66, D(G(z)): 0.26\n", + "Epoch [208/500], d_loss: 1.1944, g_loss: 1.2044, D(x): 0.62, D(G(z)): 0.37\n", + "Epoch [209/500], d_loss: 0.9975, g_loss: 1.3585, D(x): 0.74, D(G(z)): 0.37\n", + "Epoch [210/500], d_loss: 1.0218, g_loss: 1.1609, D(x): 0.73, D(G(z)): 0.41\n", + "Epoch [211/500], d_loss: 1.1609, g_loss: 1.2328, D(x): 0.68, D(G(z)): 0.41\n", + 
"Epoch [212/500], d_loss: 0.9469, g_loss: 1.3545, D(x): 0.73, D(G(z)): 0.37\n", + "Epoch [213/500], d_loss: 1.0757, g_loss: 1.2907, D(x): 0.62, D(G(z)): 0.34\n", + "Epoch [214/500], d_loss: 0.8357, g_loss: 1.3630, D(x): 0.74, D(G(z)): 0.32\n", + "Epoch [215/500], d_loss: 0.8628, g_loss: 1.4314, D(x): 0.71, D(G(z)): 0.32\n", + "Epoch [216/500], d_loss: 1.1747, g_loss: 1.5706, D(x): 0.66, D(G(z)): 0.36\n", + "Epoch [217/500], d_loss: 0.8554, g_loss: 1.6735, D(x): 0.68, D(G(z)): 0.26\n", + "Epoch [218/500], d_loss: 1.0675, g_loss: 1.4413, D(x): 0.68, D(G(z)): 0.35\n", + "Epoch [219/500], d_loss: 0.8595, g_loss: 1.5629, D(x): 0.68, D(G(z)): 0.29\n", + "Epoch [220/500], d_loss: 0.6933, g_loss: 1.8261, D(x): 0.73, D(G(z)): 0.23\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [61/100], Step [400/600], d_loss: 0.9973, g_loss: 2.8761, D(x): 0.73, D(G(z)): 0.21\n", - "Epoch [61/100], Step [600/600], d_loss: 0.5597, g_loss: 3.2259, D(x): 0.87, D(G(z)): 0.24\n", - "Epoch [62/100], Step [200/600], d_loss: 0.9815, g_loss: 1.9172, D(x): 0.71, D(G(z)): 0.25\n", - "Epoch [62/100], Step [400/600], d_loss: 0.7774, g_loss: 2.0705, D(x): 0.71, D(G(z)): 0.16\n", - "Epoch [62/100], Step [600/600], d_loss: 0.7258, g_loss: 2.2306, D(x): 0.83, D(G(z)): 0.28\n", - "Epoch [63/100], Step [200/600], d_loss: 0.7889, g_loss: 2.1573, D(x): 0.70, D(G(z)): 0.15\n", - "Epoch [63/100], Step [400/600], d_loss: 0.5854, g_loss: 2.8319, D(x): 0.82, D(G(z)): 0.18\n", - "Epoch [63/100], Step [600/600], d_loss: 0.7550, g_loss: 3.0140, D(x): 0.77, D(G(z)): 0.18\n", - "Epoch [64/100], Step [200/600], d_loss: 0.8088, g_loss: 3.1151, D(x): 0.76, D(G(z)): 0.22\n", - "Epoch [64/100], Step [400/600], d_loss: 0.7828, g_loss: 2.3200, D(x): 0.71, D(G(z)): 0.17\n", - "Epoch [64/100], Step [600/600], d_loss: 0.7833, g_loss: 2.2829, D(x): 0.79, D(G(z)): 0.27\n", - "Epoch [65/100], Step [200/600], d_loss: 0.6672, g_loss: 2.3168, D(x): 0.79, D(G(z)): 0.21\n", - "Epoch [65/100], Step [400/600], 
d_loss: 0.5611, g_loss: 2.0441, D(x): 0.80, D(G(z)): 0.20\n", - "Epoch [65/100], Step [600/600], d_loss: 0.7871, g_loss: 1.7177, D(x): 0.79, D(G(z)): 0.29\n", - "Epoch [66/100], Step [200/600], d_loss: 0.6754, g_loss: 2.2386, D(x): 0.79, D(G(z)): 0.23\n", - "Epoch [66/100], Step [400/600], d_loss: 0.5974, g_loss: 2.0712, D(x): 0.78, D(G(z)): 0.19\n", - "Epoch [66/100], Step [600/600], d_loss: 0.7797, g_loss: 2.5513, D(x): 0.72, D(G(z)): 0.18\n", - "Epoch [67/100], Step [200/600], d_loss: 0.8166, g_loss: 2.3050, D(x): 0.73, D(G(z)): 0.21\n", - "Epoch [67/100], Step [400/600], d_loss: 0.8455, g_loss: 1.9247, D(x): 0.73, D(G(z)): 0.22\n", - "Epoch [67/100], Step [600/600], d_loss: 0.8843, g_loss: 1.9625, D(x): 0.78, D(G(z)): 0.31\n", - "Epoch [68/100], Step [200/600], d_loss: 0.8341, g_loss: 2.2858, D(x): 0.66, D(G(z)): 0.15\n", - "Epoch [68/100], Step [400/600], d_loss: 0.8814, g_loss: 2.5744, D(x): 0.78, D(G(z)): 0.26\n", - "Epoch [68/100], Step [600/600], d_loss: 0.9402, g_loss: 2.2109, D(x): 0.72, D(G(z)): 0.25\n", - "Epoch [69/100], Step [200/600], d_loss: 0.8859, g_loss: 1.8589, D(x): 0.80, D(G(z)): 0.34\n", - "Epoch [69/100], Step [400/600], d_loss: 1.1010, g_loss: 1.7917, D(x): 0.73, D(G(z)): 0.35\n", - "Epoch [69/100], Step [600/600], d_loss: 0.8336, g_loss: 2.0538, D(x): 0.76, D(G(z)): 0.29\n", - "Epoch [70/100], Step [200/600], d_loss: 0.6562, g_loss: 1.9388, D(x): 0.78, D(G(z)): 0.19\n", - "Epoch [70/100], Step [400/600], d_loss: 0.6752, g_loss: 2.1560, D(x): 0.80, D(G(z)): 0.24\n", - "Epoch [70/100], Step [600/600], d_loss: 0.5649, g_loss: 2.6818, D(x): 0.82, D(G(z)): 0.19\n", - "Epoch [71/100], Step [200/600], d_loss: 0.6391, g_loss: 2.3606, D(x): 0.80, D(G(z)): 0.20\n", - "Epoch [71/100], Step [400/600], d_loss: 1.1834, g_loss: 2.4686, D(x): 0.69, D(G(z)): 0.32\n", - "Epoch [71/100], Step [600/600], d_loss: 0.6769, g_loss: 2.9032, D(x): 0.77, D(G(z)): 0.21\n", - "Epoch [72/100], Step [200/600], d_loss: 1.0312, g_loss: 1.8052, D(x): 0.66, D(G(z)): 
0.26\n", - "Epoch [72/100], Step [400/600], d_loss: 1.0099, g_loss: 2.1026, D(x): 0.77, D(G(z)): 0.32\n", - "Epoch [72/100], Step [600/600], d_loss: 0.7794, g_loss: 1.8779, D(x): 0.71, D(G(z)): 0.20\n", - "Epoch [73/100], Step [200/600], d_loss: 0.7900, g_loss: 2.1821, D(x): 0.74, D(G(z)): 0.25\n", - "Epoch [73/100], Step [400/600], d_loss: 0.5329, g_loss: 2.5052, D(x): 0.86, D(G(z)): 0.22\n", - "Epoch [73/100], Step [600/600], d_loss: 0.5456, g_loss: 2.3821, D(x): 0.83, D(G(z)): 0.19\n", - "Epoch [74/100], Step [200/600], d_loss: 0.7718, g_loss: 1.8728, D(x): 0.78, D(G(z)): 0.28\n", - "Epoch [74/100], Step [400/600], d_loss: 0.8062, g_loss: 2.2049, D(x): 0.72, D(G(z)): 0.22\n", - "Epoch [74/100], Step [600/600], d_loss: 0.6960, g_loss: 1.6878, D(x): 0.80, D(G(z)): 0.27\n", - "Epoch [75/100], Step [200/600], d_loss: 0.7569, g_loss: 2.0632, D(x): 0.73, D(G(z)): 0.21\n", - "Epoch [75/100], Step [400/600], d_loss: 0.6159, g_loss: 2.3796, D(x): 0.82, D(G(z)): 0.26\n", - "Epoch [75/100], Step [600/600], d_loss: 0.7795, g_loss: 2.4644, D(x): 0.69, D(G(z)): 0.18\n", - "Epoch [76/100], Step [200/600], d_loss: 0.7299, g_loss: 2.1992, D(x): 0.81, D(G(z)): 0.25\n", - "Epoch [76/100], Step [400/600], d_loss: 0.9521, g_loss: 2.1210, D(x): 0.75, D(G(z)): 0.33\n", - "Epoch [76/100], Step [600/600], d_loss: 0.9014, g_loss: 2.2860, D(x): 0.77, D(G(z)): 0.28\n", - "Epoch [77/100], Step [200/600], d_loss: 0.7484, g_loss: 2.1562, D(x): 0.76, D(G(z)): 0.24\n", - "Epoch [77/100], Step [400/600], d_loss: 0.7104, g_loss: 1.6772, D(x): 0.79, D(G(z)): 0.26\n", - "Epoch [77/100], Step [600/600], d_loss: 0.6114, g_loss: 1.5497, D(x): 0.82, D(G(z)): 0.24\n", - "Epoch [78/100], Step [200/600], d_loss: 0.6991, g_loss: 1.9616, D(x): 0.80, D(G(z)): 0.27\n", - "Epoch [78/100], Step [400/600], d_loss: 0.5768, g_loss: 1.8617, D(x): 0.80, D(G(z)): 0.20\n", - "Epoch [78/100], Step [600/600], d_loss: 0.5998, g_loss: 1.9668, D(x): 0.80, D(G(z)): 0.21\n", - "Epoch [79/100], Step [200/600], d_loss: 0.6500, 
g_loss: 1.6905, D(x): 0.82, D(G(z)): 0.26\n", - "Epoch [79/100], Step [400/600], d_loss: 0.7683, g_loss: 2.4840, D(x): 0.74, D(G(z)): 0.23\n", - "Epoch [79/100], Step [600/600], d_loss: 0.6214, g_loss: 2.1872, D(x): 0.81, D(G(z)): 0.26\n", - "Epoch [80/100], Step [200/600], d_loss: 0.6392, g_loss: 2.0616, D(x): 0.75, D(G(z)): 0.17\n", - "Epoch [80/100], Step [400/600], d_loss: 0.5256, g_loss: 2.1704, D(x): 0.82, D(G(z)): 0.21\n", - "Epoch [80/100], Step [600/600], d_loss: 1.0950, g_loss: 1.5804, D(x): 0.73, D(G(z)): 0.32\n", - "Epoch [81/100], Step [200/600], d_loss: 0.6317, g_loss: 2.5199, D(x): 0.82, D(G(z)): 0.22\n", - "Epoch [81/100], Step [400/600], d_loss: 0.8751, g_loss: 2.0601, D(x): 0.84, D(G(z)): 0.34\n", - "Epoch [81/100], Step [600/600], d_loss: 0.7656, g_loss: 1.9939, D(x): 0.74, D(G(z)): 0.24\n", - "Epoch [82/100], Step [200/600], d_loss: 1.0459, g_loss: 1.7681, D(x): 0.69, D(G(z)): 0.25\n", - "Epoch [82/100], Step [400/600], d_loss: 0.7828, g_loss: 2.1891, D(x): 0.73, D(G(z)): 0.21\n", - "Epoch [82/100], Step [600/600], d_loss: 0.8005, g_loss: 2.3678, D(x): 0.72, D(G(z)): 0.24\n", - "Epoch [83/100], Step [200/600], d_loss: 0.8111, g_loss: 1.7620, D(x): 0.80, D(G(z)): 0.32\n", - "Epoch [83/100], Step [400/600], d_loss: 0.7252, g_loss: 2.3101, D(x): 0.82, D(G(z)): 0.24\n", - "Epoch [83/100], Step [600/600], d_loss: 0.7177, g_loss: 2.4474, D(x): 0.77, D(G(z)): 0.19\n", - "Epoch [84/100], Step [200/600], d_loss: 0.9098, g_loss: 1.9818, D(x): 0.72, D(G(z)): 0.26\n", - "Epoch [84/100], Step [400/600], d_loss: 0.7574, g_loss: 1.6871, D(x): 0.76, D(G(z)): 0.25\n", - "Epoch [84/100], Step [600/600], d_loss: 0.8107, g_loss: 2.1725, D(x): 0.75, D(G(z)): 0.24\n", - "Epoch [85/100], Step [200/600], d_loss: 0.6158, g_loss: 2.3403, D(x): 0.78, D(G(z)): 0.18\n", - "Epoch [85/100], Step [400/600], d_loss: 0.5934, g_loss: 2.2655, D(x): 0.84, D(G(z)): 0.22\n", - "Epoch [85/100], Step [600/600], d_loss: 0.7513, g_loss: 2.1467, D(x): 0.76, D(G(z)): 0.24\n", - "Epoch 
[86/100], Step [200/600], d_loss: 1.0098, g_loss: 1.7394, D(x): 0.76, D(G(z)): 0.31\n", - "Epoch [86/100], Step [400/600], d_loss: 1.0568, g_loss: 1.9811, D(x): 0.72, D(G(z)): 0.32\n", - "Epoch [86/100], Step [600/600], d_loss: 0.7181, g_loss: 1.8037, D(x): 0.81, D(G(z)): 0.30\n", - "Epoch [87/100], Step [200/600], d_loss: 0.6905, g_loss: 2.0376, D(x): 0.80, D(G(z)): 0.23\n", - "Epoch [87/100], Step [400/600], d_loss: 0.8590, g_loss: 1.9530, D(x): 0.77, D(G(z)): 0.28\n", - "Epoch [87/100], Step [600/600], d_loss: 0.7231, g_loss: 2.3664, D(x): 0.77, D(G(z)): 0.28\n", - "Epoch [88/100], Step [200/600], d_loss: 0.7109, g_loss: 1.7531, D(x): 0.81, D(G(z)): 0.30\n", - "Epoch [88/100], Step [400/600], d_loss: 0.7651, g_loss: 1.6456, D(x): 0.77, D(G(z)): 0.29\n", - "Epoch [88/100], Step [600/600], d_loss: 0.7218, g_loss: 2.1285, D(x): 0.74, D(G(z)): 0.24\n", - "Epoch [89/100], Step [200/600], d_loss: 0.7109, g_loss: 1.3709, D(x): 0.81, D(G(z)): 0.31\n", - "Epoch [89/100], Step [400/600], d_loss: 0.6825, g_loss: 1.6596, D(x): 0.76, D(G(z)): 0.21\n", - "Epoch [89/100], Step [600/600], d_loss: 0.6925, g_loss: 1.7741, D(x): 0.76, D(G(z)): 0.24\n", - "Epoch [90/100], Step [200/600], d_loss: 0.7440, g_loss: 1.7874, D(x): 0.72, D(G(z)): 0.22\n", - "Epoch [90/100], Step [400/600], d_loss: 0.6042, g_loss: 2.3325, D(x): 0.79, D(G(z)): 0.23\n", - "Epoch [90/100], Step [600/600], d_loss: 0.5725, g_loss: 2.4630, D(x): 0.80, D(G(z)): 0.21\n", - "Epoch [91/100], Step [200/600], d_loss: 0.7155, g_loss: 2.1033, D(x): 0.78, D(G(z)): 0.26\n", - "Epoch [91/100], Step [400/600], d_loss: 0.8821, g_loss: 1.5430, D(x): 0.74, D(G(z)): 0.30\n", - "Epoch [91/100], Step [600/600], d_loss: 0.8353, g_loss: 2.5022, D(x): 0.75, D(G(z)): 0.21\n" + "Epoch [221/500], d_loss: 1.1263, g_loss: 1.3004, D(x): 0.66, D(G(z)): 0.39\n", + "Epoch [222/500], d_loss: 1.1092, g_loss: 1.2381, D(x): 0.64, D(G(z)): 0.37\n", + "Epoch [223/500], d_loss: 0.8626, g_loss: 1.3710, D(x): 0.72, D(G(z)): 0.32\n", + "Epoch 
[224/500], d_loss: 0.9928, g_loss: 1.3832, D(x): 0.73, D(G(z)): 0.38\n", + "Epoch [225/500], d_loss: 1.0119, g_loss: 1.5089, D(x): 0.64, D(G(z)): 0.31\n", + "Epoch [226/500], d_loss: 1.0609, g_loss: 1.6157, D(x): 0.67, D(G(z)): 0.32\n", + "Epoch [227/500], d_loss: 0.8652, g_loss: 1.4949, D(x): 0.72, D(G(z)): 0.33\n", + "Epoch [228/500], d_loss: 1.0230, g_loss: 1.5590, D(x): 0.65, D(G(z)): 0.32\n", + "Epoch [229/500], d_loss: 0.9471, g_loss: 1.4736, D(x): 0.71, D(G(z)): 0.34\n", + "Epoch [230/500], d_loss: 1.2314, g_loss: 1.3161, D(x): 0.58, D(G(z)): 0.35\n", + "Epoch [231/500], d_loss: 0.8634, g_loss: 1.4010, D(x): 0.74, D(G(z)): 0.32\n", + "Epoch [232/500], d_loss: 1.0469, g_loss: 1.2846, D(x): 0.67, D(G(z)): 0.36\n", + "Epoch [233/500], d_loss: 1.0392, g_loss: 1.4090, D(x): 0.63, D(G(z)): 0.31\n", + "Epoch [234/500], d_loss: 0.9939, g_loss: 1.3455, D(x): 0.64, D(G(z)): 0.31\n", + "Epoch [235/500], d_loss: 0.7709, g_loss: 1.7876, D(x): 0.74, D(G(z)): 0.27\n", + "Epoch [236/500], d_loss: 0.7895, g_loss: 1.6656, D(x): 0.70, D(G(z)): 0.27\n", + "Epoch [237/500], d_loss: 1.1511, g_loss: 1.2525, D(x): 0.62, D(G(z)): 0.37\n", + "Epoch [238/500], d_loss: 0.8322, g_loss: 1.5956, D(x): 0.69, D(G(z)): 0.28\n", + "Epoch [239/500], d_loss: 0.9740, g_loss: 1.2187, D(x): 0.71, D(G(z)): 0.36\n", + "Epoch [240/500], d_loss: 1.3437, g_loss: 1.2492, D(x): 0.53, D(G(z)): 0.35\n", + "Epoch [241/500], d_loss: 0.8933, g_loss: 1.6930, D(x): 0.65, D(G(z)): 0.25\n", + "Epoch [242/500], d_loss: 1.2746, g_loss: 1.3917, D(x): 0.56, D(G(z)): 0.32\n", + "Epoch [243/500], d_loss: 1.1671, g_loss: 1.2340, D(x): 0.63, D(G(z)): 0.37\n", + "Epoch [244/500], d_loss: 0.7743, g_loss: 1.5363, D(x): 0.70, D(G(z)): 0.26\n", + "Epoch [245/500], d_loss: 1.1623, g_loss: 1.4239, D(x): 0.60, D(G(z)): 0.33\n", + "Epoch [246/500], d_loss: 1.0753, g_loss: 1.3745, D(x): 0.60, D(G(z)): 0.32\n", + "Epoch [247/500], d_loss: 0.9158, g_loss: 1.5208, D(x): 0.74, D(G(z)): 0.35\n", + "Epoch [248/500], d_loss: 1.0055, 
g_loss: 1.3679, D(x): 0.64, D(G(z)): 0.32\n", + "Epoch [249/500], d_loss: 1.2974, g_loss: 1.3794, D(x): 0.60, D(G(z)): 0.39\n", + "Epoch [250/500], d_loss: 0.8648, g_loss: 1.5861, D(x): 0.74, D(G(z)): 0.30\n", + "Epoch [251/500], d_loss: 1.1756, g_loss: 1.1416, D(x): 0.61, D(G(z)): 0.37\n", + "Epoch [252/500], d_loss: 1.0145, g_loss: 1.1828, D(x): 0.69, D(G(z)): 0.37\n", + "Epoch [253/500], d_loss: 1.3051, g_loss: 1.0873, D(x): 0.58, D(G(z)): 0.41\n", + "Epoch [254/500], d_loss: 1.0138, g_loss: 1.3082, D(x): 0.67, D(G(z)): 0.34\n", + "Epoch [255/500], d_loss: 1.0561, g_loss: 1.2731, D(x): 0.69, D(G(z)): 0.38\n", + "Epoch [256/500], d_loss: 1.1814, g_loss: 1.3217, D(x): 0.63, D(G(z)): 0.37\n", + "Epoch [257/500], d_loss: 1.1986, g_loss: 1.0774, D(x): 0.66, D(G(z)): 0.42\n", + "Epoch [258/500], d_loss: 1.3802, g_loss: 1.0329, D(x): 0.60, D(G(z)): 0.44\n", + "Epoch [259/500], d_loss: 1.2976, g_loss: 1.2203, D(x): 0.57, D(G(z)): 0.38\n", + "Epoch [260/500], d_loss: 1.2745, g_loss: 1.3612, D(x): 0.59, D(G(z)): 0.34\n", + "Epoch [261/500], d_loss: 0.9781, g_loss: 1.4583, D(x): 0.67, D(G(z)): 0.30\n", + "Epoch [262/500], d_loss: 1.0238, g_loss: 1.2880, D(x): 0.63, D(G(z)): 0.33\n", + "Epoch [263/500], d_loss: 1.2486, g_loss: 1.1443, D(x): 0.59, D(G(z)): 0.36\n", + "Epoch [264/500], d_loss: 0.7975, g_loss: 1.3215, D(x): 0.75, D(G(z)): 0.33\n", + "Epoch [265/500], d_loss: 0.9316, g_loss: 1.5436, D(x): 0.71, D(G(z)): 0.34\n", + "Epoch [266/500], d_loss: 1.1887, g_loss: 1.0840, D(x): 0.68, D(G(z)): 0.44\n", + "Epoch [267/500], d_loss: 1.2330, g_loss: 1.1703, D(x): 0.56, D(G(z)): 0.37\n", + "Epoch [268/500], d_loss: 0.8973, g_loss: 1.4848, D(x): 0.67, D(G(z)): 0.29\n", + "Epoch [269/500], d_loss: 0.7452, g_loss: 1.6125, D(x): 0.75, D(G(z)): 0.29\n", + "Epoch [270/500], d_loss: 1.2992, g_loss: 1.3898, D(x): 0.57, D(G(z)): 0.34\n", + "Epoch [271/500], d_loss: 1.0520, g_loss: 1.1962, D(x): 0.67, D(G(z)): 0.38\n", + "Epoch [272/500], d_loss: 0.9055, g_loss: 1.4353, D(x): 0.71, 
D(G(z)): 0.34\n", + "Epoch [273/500], d_loss: 1.2402, g_loss: 1.1550, D(x): 0.60, D(G(z)): 0.38\n", + "Epoch [274/500], d_loss: 1.0007, g_loss: 1.4668, D(x): 0.66, D(G(z)): 0.30\n", + "Epoch [275/500], d_loss: 1.2061, g_loss: 1.1666, D(x): 0.59, D(G(z)): 0.35\n", + "Epoch [276/500], d_loss: 0.9925, g_loss: 1.6960, D(x): 0.63, D(G(z)): 0.27\n", + "Epoch [277/500], d_loss: 1.4039, g_loss: 1.2600, D(x): 0.58, D(G(z)): 0.39\n", + "Epoch [278/500], d_loss: 0.8794, g_loss: 1.3767, D(x): 0.75, D(G(z)): 0.36\n", + "Epoch [279/500], d_loss: 0.9797, g_loss: 1.2642, D(x): 0.68, D(G(z)): 0.36\n", + "Epoch [280/500], d_loss: 1.3058, g_loss: 1.2579, D(x): 0.56, D(G(z)): 0.36\n", + "Epoch [281/500], d_loss: 1.0888, g_loss: 1.2654, D(x): 0.62, D(G(z)): 0.34\n", + "Epoch [282/500], d_loss: 1.0025, g_loss: 1.5665, D(x): 0.67, D(G(z)): 0.31\n", + "Epoch [283/500], d_loss: 0.9770, g_loss: 1.2810, D(x): 0.66, D(G(z)): 0.33\n", + "Epoch [284/500], d_loss: 0.8420, g_loss: 1.5749, D(x): 0.69, D(G(z)): 0.28\n", + "Epoch [285/500], d_loss: 1.0965, g_loss: 1.3816, D(x): 0.62, D(G(z)): 0.34\n", + "Epoch [286/500], d_loss: 1.5135, g_loss: 1.2000, D(x): 0.57, D(G(z)): 0.43\n", + "Epoch [287/500], d_loss: 1.4430, g_loss: 1.0707, D(x): 0.64, D(G(z)): 0.46\n", + "Epoch [288/500], d_loss: 0.8870, g_loss: 1.4633, D(x): 0.65, D(G(z)): 0.29\n", + "Epoch [289/500], d_loss: 1.0973, g_loss: 1.2601, D(x): 0.65, D(G(z)): 0.37\n", + "Epoch [290/500], d_loss: 1.0451, g_loss: 1.3617, D(x): 0.68, D(G(z)): 0.35\n", + "Epoch [291/500], d_loss: 1.0204, g_loss: 1.3929, D(x): 0.67, D(G(z)): 0.33\n", + "Epoch [292/500], d_loss: 0.9292, g_loss: 1.4281, D(x): 0.71, D(G(z)): 0.34\n", + "Epoch [293/500], d_loss: 0.6899, g_loss: 1.6262, D(x): 0.79, D(G(z)): 0.30\n", + "Epoch [294/500], d_loss: 0.8661, g_loss: 1.5777, D(x): 0.67, D(G(z)): 0.26\n", + "Epoch [295/500], d_loss: 1.0175, g_loss: 1.3831, D(x): 0.73, D(G(z)): 0.37\n", + "Epoch [296/500], d_loss: 0.7845, g_loss: 1.4524, D(x): 0.74, D(G(z)): 0.31\n", + "Epoch 
[297/500], d_loss: 1.3365, g_loss: 1.0138, D(x): 0.60, D(G(z)): 0.41\n", + "Epoch [298/500], d_loss: 1.2213, g_loss: 0.9765, D(x): 0.72, D(G(z)): 0.48\n", + "Epoch [299/500], d_loss: 1.0286, g_loss: 1.1387, D(x): 0.76, D(G(z)): 0.44\n", + "Epoch [300/500], d_loss: 0.9002, g_loss: 1.4548, D(x): 0.69, D(G(z)): 0.30\n", + "Epoch [301/500], d_loss: 1.1122, g_loss: 1.3067, D(x): 0.67, D(G(z)): 0.37\n", + "Epoch [302/500], d_loss: 1.1475, g_loss: 1.1471, D(x): 0.67, D(G(z)): 0.41\n", + "Epoch [303/500], d_loss: 1.2457, g_loss: 1.1959, D(x): 0.61, D(G(z)): 0.40\n", + "Epoch [304/500], d_loss: 1.1493, g_loss: 1.4431, D(x): 0.61, D(G(z)): 0.33\n", + "Epoch [305/500], d_loss: 0.9298, g_loss: 1.6244, D(x): 0.67, D(G(z)): 0.28\n", + "Epoch [306/500], d_loss: 0.8033, g_loss: 1.4576, D(x): 0.73, D(G(z)): 0.30\n", + "Epoch [307/500], d_loss: 1.2370, g_loss: 0.9391, D(x): 0.68, D(G(z)): 0.46\n", + "Epoch [308/500], d_loss: 1.5486, g_loss: 1.1904, D(x): 0.52, D(G(z)): 0.41\n", + "Epoch [309/500], d_loss: 0.8454, g_loss: 1.6079, D(x): 0.74, D(G(z)): 0.31\n", + "Epoch [310/500], d_loss: 1.0735, g_loss: 1.2887, D(x): 0.68, D(G(z)): 0.38\n", + "Epoch [311/500], d_loss: 1.1446, g_loss: 1.4154, D(x): 0.63, D(G(z)): 0.33\n", + "Epoch [312/500], d_loss: 0.9862, g_loss: 1.3230, D(x): 0.70, D(G(z)): 0.35\n", + "Epoch [313/500], d_loss: 1.1652, g_loss: 1.4194, D(x): 0.57, D(G(z)): 0.31\n", + "Epoch [314/500], d_loss: 1.4465, g_loss: 1.0993, D(x): 0.55, D(G(z)): 0.45\n", + "Epoch [315/500], d_loss: 1.0327, g_loss: 1.3419, D(x): 0.66, D(G(z)): 0.34\n", + "Epoch [316/500], d_loss: 1.0768, g_loss: 1.2867, D(x): 0.68, D(G(z)): 0.39\n", + "Epoch [317/500], d_loss: 0.6920, g_loss: 1.7641, D(x): 0.74, D(G(z)): 0.24\n", + "Epoch [318/500], d_loss: 0.8647, g_loss: 1.6503, D(x): 0.68, D(G(z)): 0.27\n", + "Epoch [319/500], d_loss: 0.8730, g_loss: 1.6164, D(x): 0.68, D(G(z)): 0.29\n", + "Epoch [320/500], d_loss: 1.4413, g_loss: 1.2737, D(x): 0.54, D(G(z)): 0.38\n", + "Epoch [321/500], d_loss: 1.2462, 
g_loss: 1.1535, D(x): 0.64, D(G(z)): 0.41\n", + "Epoch [322/500], d_loss: 0.9958, g_loss: 1.3866, D(x): 0.69, D(G(z)): 0.35\n", + "Epoch [323/500], d_loss: 0.9565, g_loss: 1.3935, D(x): 0.69, D(G(z)): 0.33\n", + "Epoch [324/500], d_loss: 0.9096, g_loss: 1.4943, D(x): 0.71, D(G(z)): 0.33\n", + "Epoch [325/500], d_loss: 0.8305, g_loss: 1.4078, D(x): 0.73, D(G(z)): 0.32\n", + "Epoch [326/500], d_loss: 0.9054, g_loss: 1.3668, D(x): 0.69, D(G(z)): 0.32\n", + "Epoch [327/500], d_loss: 0.9955, g_loss: 1.3927, D(x): 0.64, D(G(z)): 0.31\n", + "Epoch [328/500], d_loss: 0.9439, g_loss: 1.5673, D(x): 0.66, D(G(z)): 0.31\n", + "Epoch [329/500], d_loss: 0.8906, g_loss: 1.3155, D(x): 0.71, D(G(z)): 0.34\n", + "Epoch [330/500], d_loss: 1.1716, g_loss: 1.3670, D(x): 0.62, D(G(z)): 0.35\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Epoch [92/100], Step [200/600], d_loss: 0.7545, g_loss: 1.8934, D(x): 0.72, D(G(z)): 0.19\n", - "Epoch [92/100], Step [400/600], d_loss: 0.8663, g_loss: 1.8379, D(x): 0.72, D(G(z)): 0.26\n", - "Epoch [92/100], Step [600/600], d_loss: 0.6683, g_loss: 2.8028, D(x): 0.74, D(G(z)): 0.14\n", - "Epoch [93/100], Step [200/600], d_loss: 0.8399, g_loss: 1.3640, D(x): 0.80, D(G(z)): 0.34\n", - "Epoch [93/100], Step [400/600], d_loss: 0.8975, g_loss: 1.6973, D(x): 0.69, D(G(z)): 0.22\n", - "Epoch [93/100], Step [600/600], d_loss: 0.8928, g_loss: 1.9126, D(x): 0.81, D(G(z)): 0.32\n", - "Epoch [94/100], Step [200/600], d_loss: 0.6605, g_loss: 1.5847, D(x): 0.83, D(G(z)): 0.28\n", - "Epoch [94/100], Step [400/600], d_loss: 1.0173, g_loss: 1.5576, D(x): 0.71, D(G(z)): 0.37\n", - "Epoch [94/100], Step [600/600], d_loss: 0.7937, g_loss: 2.1991, D(x): 0.76, D(G(z)): 0.25\n", - "Epoch [95/100], Step [200/600], d_loss: 0.7031, g_loss: 1.6621, D(x): 0.75, D(G(z)): 0.24\n", - "Epoch [95/100], Step [400/600], d_loss: 0.7104, g_loss: 2.2526, D(x): 0.74, D(G(z)): 0.22\n", - "Epoch [95/100], Step [600/600], d_loss: 0.5081, g_loss: 2.2593, D(x): 0.85, D(G(z)): 
0.22\n", - "Epoch [96/100], Step [200/600], d_loss: 0.7492, g_loss: 2.2479, D(x): 0.80, D(G(z)): 0.26\n", - "Epoch [96/100], Step [400/600], d_loss: 0.9099, g_loss: 2.2725, D(x): 0.70, D(G(z)): 0.25\n", - "Epoch [96/100], Step [600/600], d_loss: 0.7022, g_loss: 2.4435, D(x): 0.74, D(G(z)): 0.17\n", - "Epoch [97/100], Step [200/600], d_loss: 0.9358, g_loss: 1.8359, D(x): 0.76, D(G(z)): 0.34\n", - "Epoch [97/100], Step [400/600], d_loss: 0.8493, g_loss: 1.9281, D(x): 0.70, D(G(z)): 0.24\n", - "Epoch [97/100], Step [600/600], d_loss: 1.1722, g_loss: 1.8786, D(x): 0.62, D(G(z)): 0.27\n", - "Epoch [98/100], Step [200/600], d_loss: 1.0150, g_loss: 1.7010, D(x): 0.74, D(G(z)): 0.36\n", - "Epoch [98/100], Step [400/600], d_loss: 0.7811, g_loss: 1.6953, D(x): 0.77, D(G(z)): 0.26\n", - "Epoch [98/100], Step [600/600], d_loss: 1.0406, g_loss: 1.6888, D(x): 0.69, D(G(z)): 0.30\n", - "Epoch [99/100], Step [200/600], d_loss: 0.6473, g_loss: 1.8382, D(x): 0.76, D(G(z)): 0.22\n", - "Epoch [99/100], Step [400/600], d_loss: 0.6765, g_loss: 2.1369, D(x): 0.72, D(G(z)): 0.18\n", - "Epoch [99/100], Step [600/600], d_loss: 0.6515, g_loss: 2.3907, D(x): 0.79, D(G(z)): 0.24\n" + "Epoch [331/500], d_loss: 0.9890, g_loss: 1.1310, D(x): 0.71, D(G(z)): 0.40\n", + "Epoch [332/500], d_loss: 0.7404, g_loss: 1.7732, D(x): 0.70, D(G(z)): 0.23\n", + "Epoch [333/500], d_loss: 1.0005, g_loss: 1.3604, D(x): 0.69, D(G(z)): 0.36\n", + "Epoch [334/500], d_loss: 1.4437, g_loss: 1.0994, D(x): 0.53, D(G(z)): 0.40\n", + "Epoch [335/500], d_loss: 0.8868, g_loss: 1.3829, D(x): 0.73, D(G(z)): 0.35\n", + "Epoch [336/500], d_loss: 0.8635, g_loss: 1.4343, D(x): 0.70, D(G(z)): 0.31\n", + "Epoch [337/500], d_loss: 1.1889, g_loss: 1.3717, D(x): 0.56, D(G(z)): 0.30\n", + "Epoch [338/500], d_loss: 1.1880, g_loss: 1.2512, D(x): 0.64, D(G(z)): 0.40\n", + "Epoch [339/500], d_loss: 1.1491, g_loss: 1.3130, D(x): 0.60, D(G(z)): 0.33\n", + "Epoch [340/500], d_loss: 0.9684, g_loss: 1.3494, D(x): 0.67, D(G(z)): 0.33\n", + 
"Epoch [341/500], d_loss: 0.7114, g_loss: 1.9377, D(x): 0.75, D(G(z)): 0.28\n", + "Epoch [342/500], d_loss: 1.1577, g_loss: 1.5487, D(x): 0.59, D(G(z)): 0.33\n", + "Epoch [343/500], d_loss: 1.2421, g_loss: 1.2815, D(x): 0.59, D(G(z)): 0.36\n", + "Epoch [344/500], d_loss: 1.5192, g_loss: 1.1339, D(x): 0.56, D(G(z)): 0.42\n", + "Epoch [345/500], d_loss: 1.1391, g_loss: 1.4646, D(x): 0.59, D(G(z)): 0.31\n", + "Epoch [346/500], d_loss: 0.9848, g_loss: 1.4129, D(x): 0.65, D(G(z)): 0.32\n", + "Epoch [347/500], d_loss: 0.9760, g_loss: 1.2992, D(x): 0.69, D(G(z)): 0.35\n", + "Epoch [348/500], d_loss: 0.8867, g_loss: 1.4819, D(x): 0.70, D(G(z)): 0.32\n", + "Epoch [349/500], d_loss: 0.9625, g_loss: 1.3399, D(x): 0.67, D(G(z)): 0.33\n", + "Epoch [350/500], d_loss: 0.6653, g_loss: 1.8901, D(x): 0.76, D(G(z)): 0.24\n", + "Epoch [351/500], d_loss: 1.1837, g_loss: 1.1973, D(x): 0.56, D(G(z)): 0.33\n", + "Epoch [352/500], d_loss: 1.0042, g_loss: 1.3640, D(x): 0.68, D(G(z)): 0.35\n", + "Epoch [353/500], d_loss: 1.1688, g_loss: 1.1871, D(x): 0.66, D(G(z)): 0.39\n", + "Epoch [354/500], d_loss: 0.9641, g_loss: 1.2665, D(x): 0.68, D(G(z)): 0.34\n", + "Epoch [355/500], d_loss: 1.0132, g_loss: 1.2459, D(x): 0.71, D(G(z)): 0.38\n", + "Epoch [356/500], d_loss: 1.0252, g_loss: 1.4572, D(x): 0.62, D(G(z)): 0.30\n", + "Epoch [357/500], d_loss: 1.3188, g_loss: 1.5916, D(x): 0.57, D(G(z)): 0.34\n", + "Epoch [358/500], d_loss: 0.9871, g_loss: 1.2807, D(x): 0.68, D(G(z)): 0.35\n", + "Epoch [359/500], d_loss: 1.1308, g_loss: 1.0376, D(x): 0.67, D(G(z)): 0.42\n", + "Epoch [360/500], d_loss: 0.9130, g_loss: 1.5355, D(x): 0.68, D(G(z)): 0.31\n", + "Epoch [361/500], d_loss: 0.8521, g_loss: 1.4329, D(x): 0.69, D(G(z)): 0.31\n", + "Epoch [362/500], d_loss: 1.0159, g_loss: 1.3386, D(x): 0.67, D(G(z)): 0.35\n", + "Epoch [363/500], d_loss: 0.8467, g_loss: 1.4175, D(x): 0.75, D(G(z)): 0.34\n", + "Epoch [364/500], d_loss: 0.9966, g_loss: 1.4507, D(x): 0.68, D(G(z)): 0.34\n", + "Epoch [365/500], d_loss: 
0.9616, g_loss: 1.3088, D(x): 0.70, D(G(z)): 0.37\n", + "Epoch [366/500], d_loss: 1.2030, g_loss: 1.1873, D(x): 0.64, D(G(z)): 0.40\n", + "Epoch [367/500], d_loss: 1.1350, g_loss: 1.2801, D(x): 0.62, D(G(z)): 0.34\n", + "Epoch [368/500], d_loss: 1.2942, g_loss: 1.2793, D(x): 0.58, D(G(z)): 0.38\n", + "Epoch [369/500], d_loss: 1.0333, g_loss: 1.2481, D(x): 0.68, D(G(z)): 0.35\n", + "Epoch [370/500], d_loss: 0.8556, g_loss: 1.3765, D(x): 0.71, D(G(z)): 0.31\n", + "Epoch [371/500], d_loss: 1.0475, g_loss: 1.3647, D(x): 0.64, D(G(z)): 0.33\n", + "Epoch [372/500], d_loss: 0.8947, g_loss: 1.5077, D(x): 0.67, D(G(z)): 0.28\n", + "Epoch [373/500], d_loss: 0.8803, g_loss: 1.6807, D(x): 0.67, D(G(z)): 0.29\n", + "Epoch [374/500], d_loss: 0.8584, g_loss: 1.5462, D(x): 0.69, D(G(z)): 0.29\n", + "Epoch [375/500], d_loss: 1.1454, g_loss: 1.3373, D(x): 0.62, D(G(z)): 0.33\n", + "Epoch [376/500], d_loss: 1.0556, g_loss: 1.4145, D(x): 0.65, D(G(z)): 0.32\n", + "Epoch [377/500], d_loss: 0.7642, g_loss: 1.5628, D(x): 0.73, D(G(z)): 0.28\n", + "Epoch [378/500], d_loss: 0.7733, g_loss: 1.5946, D(x): 0.72, D(G(z)): 0.28\n", + "Epoch [379/500], d_loss: 0.7441, g_loss: 1.5352, D(x): 0.76, D(G(z)): 0.30\n", + "Epoch [380/500], d_loss: 0.9546, g_loss: 1.5265, D(x): 0.64, D(G(z)): 0.29\n", + "Epoch [381/500], d_loss: 1.2293, g_loss: 1.3148, D(x): 0.60, D(G(z)): 0.38\n", + "Epoch [382/500], d_loss: 0.9948, g_loss: 1.4577, D(x): 0.65, D(G(z)): 0.32\n", + "Epoch [383/500], d_loss: 0.8905, g_loss: 1.1919, D(x): 0.72, D(G(z)): 0.35\n", + "Epoch [384/500], d_loss: 1.0942, g_loss: 1.1836, D(x): 0.65, D(G(z)): 0.38\n", + "Epoch [385/500], d_loss: 1.0047, g_loss: 1.2332, D(x): 0.67, D(G(z)): 0.35\n", + "Epoch [386/500], d_loss: 0.6492, g_loss: 1.9882, D(x): 0.73, D(G(z)): 0.20\n", + "Epoch [387/500], d_loss: 0.9920, g_loss: 1.3929, D(x): 0.68, D(G(z)): 0.33\n", + "Epoch [388/500], d_loss: 0.9468, g_loss: 1.3731, D(x): 0.70, D(G(z)): 0.35\n", + "Epoch [389/500], d_loss: 0.8586, g_loss: 1.4711, D(x): 
0.73, D(G(z)): 0.32\n", + "Epoch [390/500], d_loss: 0.9333, g_loss: 1.3937, D(x): 0.70, D(G(z)): 0.33\n", + "Epoch [391/500], d_loss: 0.7423, g_loss: 1.4893, D(x): 0.75, D(G(z)): 0.30\n", + "Epoch [392/500], d_loss: 1.0568, g_loss: 1.2860, D(x): 0.71, D(G(z)): 0.40\n", + "Epoch [393/500], d_loss: 0.9643, g_loss: 1.5547, D(x): 0.68, D(G(z)): 0.32\n", + "Epoch [394/500], d_loss: 1.0159, g_loss: 1.2738, D(x): 0.66, D(G(z)): 0.35\n", + "Epoch [395/500], d_loss: 0.8376, g_loss: 1.6581, D(x): 0.66, D(G(z)): 0.25\n", + "Epoch [396/500], d_loss: 0.8904, g_loss: 1.3796, D(x): 0.70, D(G(z)): 0.31\n", + "Epoch [397/500], d_loss: 1.0001, g_loss: 1.3741, D(x): 0.73, D(G(z)): 0.36\n", + "Epoch [398/500], d_loss: 1.1209, g_loss: 1.2489, D(x): 0.68, D(G(z)): 0.40\n", + "Epoch [399/500], d_loss: 0.9304, g_loss: 1.3726, D(x): 0.71, D(G(z)): 0.34\n", + "Epoch [400/500], d_loss: 0.8815, g_loss: 1.4302, D(x): 0.73, D(G(z)): 0.33\n", + "Epoch [401/500], d_loss: 0.9377, g_loss: 1.3715, D(x): 0.68, D(G(z)): 0.32\n", + "Epoch [402/500], d_loss: 0.7078, g_loss: 1.8294, D(x): 0.71, D(G(z)): 0.22\n", + "Epoch [403/500], d_loss: 1.2391, g_loss: 1.2322, D(x): 0.63, D(G(z)): 0.40\n", + "Epoch [404/500], d_loss: 1.0929, g_loss: 1.2300, D(x): 0.68, D(G(z)): 0.39\n", + "Epoch [405/500], d_loss: 0.8144, g_loss: 1.7674, D(x): 0.67, D(G(z)): 0.24\n", + "Epoch [406/500], d_loss: 0.7993, g_loss: 1.6325, D(x): 0.72, D(G(z)): 0.28\n", + "Epoch [407/500], d_loss: 0.9045, g_loss: 1.3674, D(x): 0.69, D(G(z)): 0.32\n", + "Epoch [408/500], d_loss: 0.8537, g_loss: 1.6401, D(x): 0.74, D(G(z)): 0.31\n", + "Epoch [409/500], d_loss: 1.1500, g_loss: 1.1132, D(x): 0.61, D(G(z)): 0.37\n", + "Epoch [410/500], d_loss: 0.8872, g_loss: 1.4441, D(x): 0.69, D(G(z)): 0.32\n", + "Epoch [411/500], d_loss: 1.0593, g_loss: 1.1613, D(x): 0.68, D(G(z)): 0.39\n", + "Epoch [412/500], d_loss: 0.9554, g_loss: 1.4496, D(x): 0.66, D(G(z)): 0.30\n", + "Epoch [413/500], d_loss: 0.8683, g_loss: 1.6163, D(x): 0.65, D(G(z)): 0.25\n", + 
"Epoch [414/500], d_loss: 1.0911, g_loss: 1.1204, D(x): 0.68, D(G(z)): 0.40\n", + "Epoch [415/500], d_loss: 1.0169, g_loss: 1.2848, D(x): 0.65, D(G(z)): 0.35\n", + "Epoch [416/500], d_loss: 1.0138, g_loss: 1.3946, D(x): 0.65, D(G(z)): 0.33\n", + "Epoch [417/500], d_loss: 0.6403, g_loss: 1.9158, D(x): 0.77, D(G(z)): 0.23\n", + "Epoch [418/500], d_loss: 1.1670, g_loss: 1.2530, D(x): 0.63, D(G(z)): 0.37\n", + "Epoch [419/500], d_loss: 1.0041, g_loss: 1.2034, D(x): 0.70, D(G(z)): 0.38\n", + "Epoch [420/500], d_loss: 0.8330, g_loss: 1.5206, D(x): 0.69, D(G(z)): 0.28\n", + "Epoch [421/500], d_loss: 1.4888, g_loss: 1.0655, D(x): 0.61, D(G(z)): 0.45\n", + "Epoch [422/500], d_loss: 0.9200, g_loss: 1.7023, D(x): 0.66, D(G(z)): 0.28\n", + "Epoch [423/500], d_loss: 1.0848, g_loss: 1.2976, D(x): 0.66, D(G(z)): 0.37\n", + "Epoch [424/500], d_loss: 1.1389, g_loss: 1.2427, D(x): 0.63, D(G(z)): 0.38\n", + "Epoch [425/500], d_loss: 1.0085, g_loss: 1.2681, D(x): 0.72, D(G(z)): 0.39\n", + "Epoch [426/500], d_loss: 1.2738, g_loss: 1.1429, D(x): 0.64, D(G(z)): 0.44\n", + "Epoch [427/500], d_loss: 0.9562, g_loss: 1.2328, D(x): 0.73, D(G(z)): 0.38\n", + "Epoch [428/500], d_loss: 0.7374, g_loss: 1.7853, D(x): 0.72, D(G(z)): 0.23\n", + "Epoch [429/500], d_loss: 1.2673, g_loss: 1.1130, D(x): 0.64, D(G(z)): 0.44\n", + "Epoch [430/500], d_loss: 1.0077, g_loss: 1.2166, D(x): 0.68, D(G(z)): 0.37\n", + "Epoch [431/500], d_loss: 0.6839, g_loss: 1.7437, D(x): 0.77, D(G(z)): 0.27\n", + "Epoch [432/500], d_loss: 0.8532, g_loss: 1.4344, D(x): 0.73, D(G(z)): 0.33\n", + "Epoch [433/500], d_loss: 0.8013, g_loss: 1.6722, D(x): 0.72, D(G(z)): 0.26\n", + "Epoch [434/500], d_loss: 0.8339, g_loss: 1.6689, D(x): 0.71, D(G(z)): 0.28\n", + "Epoch [435/500], d_loss: 1.3025, g_loss: 1.4316, D(x): 0.56, D(G(z)): 0.32\n", + "Epoch [436/500], d_loss: 1.0284, g_loss: 1.2806, D(x): 0.68, D(G(z)): 0.37\n", + "Epoch [437/500], d_loss: 1.0547, g_loss: 1.2641, D(x): 0.61, D(G(z)): 0.34\n", + "Epoch [438/500], d_loss: 
1.3314, g_loss: 1.1603, D(x): 0.57, D(G(z)): 0.41\n", + "Epoch [439/500], d_loss: 1.1614, g_loss: 1.1311, D(x): 0.63, D(G(z)): 0.40\n", + "Epoch [440/500], d_loss: 1.0241, g_loss: 1.5066, D(x): 0.65, D(G(z)): 0.30\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch [441/500], d_loss: 0.9420, g_loss: 1.2448, D(x): 0.69, D(G(z)): 0.36\n", + "Epoch [442/500], d_loss: 0.8875, g_loss: 1.5646, D(x): 0.71, D(G(z)): 0.32\n", + "Epoch [443/500], d_loss: 0.9400, g_loss: 1.2521, D(x): 0.70, D(G(z)): 0.33\n", + "Epoch [444/500], d_loss: 1.1099, g_loss: 1.2434, D(x): 0.68, D(G(z)): 0.37\n", + "Epoch [445/500], d_loss: 1.2045, g_loss: 1.3568, D(x): 0.59, D(G(z)): 0.34\n", + "Epoch [446/500], d_loss: 0.8978, g_loss: 1.3511, D(x): 0.69, D(G(z)): 0.32\n", + "Epoch [447/500], d_loss: 0.7988, g_loss: 1.5881, D(x): 0.72, D(G(z)): 0.28\n", + "Epoch [448/500], d_loss: 0.8578, g_loss: 1.3999, D(x): 0.76, D(G(z)): 0.34\n", + "Epoch [449/500], d_loss: 0.9504, g_loss: 1.3742, D(x): 0.72, D(G(z)): 0.35\n", + "Epoch [450/500], d_loss: 1.2930, g_loss: 1.1855, D(x): 0.61, D(G(z)): 0.40\n", + "Epoch [451/500], d_loss: 1.2956, g_loss: 1.1676, D(x): 0.64, D(G(z)): 0.45\n", + "Epoch [452/500], d_loss: 0.9261, g_loss: 1.4396, D(x): 0.67, D(G(z)): 0.31\n", + "Epoch [453/500], d_loss: 1.2335, g_loss: 1.1853, D(x): 0.62, D(G(z)): 0.40\n", + "Epoch [454/500], d_loss: 1.2923, g_loss: 1.2196, D(x): 0.55, D(G(z)): 0.37\n", + "Epoch [455/500], d_loss: 0.9682, g_loss: 1.3824, D(x): 0.69, D(G(z)): 0.33\n", + "Epoch [456/500], d_loss: 1.0812, g_loss: 1.3144, D(x): 0.68, D(G(z)): 0.38\n", + "Epoch [457/500], d_loss: 1.0000, g_loss: 1.6382, D(x): 0.59, D(G(z)): 0.26\n", + "Epoch [458/500], d_loss: 0.8316, g_loss: 1.6093, D(x): 0.69, D(G(z)): 0.26\n", + "Epoch [459/500], d_loss: 0.7658, g_loss: 1.8536, D(x): 0.72, D(G(z)): 0.24\n", + "Epoch [460/500], d_loss: 0.8877, g_loss: 1.3639, D(x): 0.70, D(G(z)): 0.32\n", + "Epoch [461/500], d_loss: 1.2193, g_loss: 1.1515, D(x): 0.61, 
D(G(z)): 0.38\n", + "Epoch [462/500], d_loss: 0.9441, g_loss: 1.6214, D(x): 0.62, D(G(z)): 0.26\n", + "Epoch [463/500], d_loss: 0.8721, g_loss: 1.4527, D(x): 0.70, D(G(z)): 0.32\n", + "Epoch [464/500], d_loss: 0.7644, g_loss: 1.6146, D(x): 0.72, D(G(z)): 0.28\n", + "Epoch [465/500], d_loss: 0.8092, g_loss: 1.8323, D(x): 0.70, D(G(z)): 0.25\n", + "Epoch [466/500], d_loss: 1.0277, g_loss: 1.5180, D(x): 0.62, D(G(z)): 0.30\n", + "Epoch [467/500], d_loss: 1.3432, g_loss: 1.2022, D(x): 0.62, D(G(z)): 0.42\n", + "Epoch [468/500], d_loss: 1.0822, g_loss: 1.4048, D(x): 0.65, D(G(z)): 0.37\n", + "Epoch [469/500], d_loss: 0.8350, g_loss: 1.4815, D(x): 0.71, D(G(z)): 0.29\n", + "Epoch [470/500], d_loss: 0.9572, g_loss: 1.4335, D(x): 0.66, D(G(z)): 0.30\n", + "Epoch [471/500], d_loss: 1.0063, g_loss: 1.3566, D(x): 0.66, D(G(z)): 0.34\n", + "Epoch [472/500], d_loss: 0.9418, g_loss: 1.6834, D(x): 0.67, D(G(z)): 0.29\n", + "Epoch [473/500], d_loss: 1.0807, g_loss: 1.2172, D(x): 0.65, D(G(z)): 0.37\n", + "Epoch [474/500], d_loss: 1.0318, g_loss: 1.3176, D(x): 0.63, D(G(z)): 0.34\n", + "Epoch [475/500], d_loss: 1.0777, g_loss: 1.1631, D(x): 0.69, D(G(z)): 0.39\n", + "Epoch [476/500], d_loss: 0.7117, g_loss: 1.6953, D(x): 0.75, D(G(z)): 0.27\n", + "Epoch [477/500], d_loss: 0.8447, g_loss: 1.5539, D(x): 0.70, D(G(z)): 0.28\n", + "Epoch [478/500], d_loss: 0.8978, g_loss: 1.4826, D(x): 0.66, D(G(z)): 0.28\n", + "Epoch [479/500], d_loss: 0.8325, g_loss: 1.6255, D(x): 0.68, D(G(z)): 0.25\n", + "Epoch [480/500], d_loss: 0.6100, g_loss: 1.9152, D(x): 0.80, D(G(z)): 0.24\n", + "Epoch [481/500], d_loss: 1.0318, g_loss: 1.2833, D(x): 0.70, D(G(z)): 0.37\n", + "Epoch [482/500], d_loss: 0.7879, g_loss: 1.5580, D(x): 0.73, D(G(z)): 0.28\n", + "Epoch [483/500], d_loss: 0.7703, g_loss: 1.5651, D(x): 0.74, D(G(z)): 0.30\n", + "Epoch [484/500], d_loss: 1.1391, g_loss: 1.4132, D(x): 0.63, D(G(z)): 0.34\n", + "Epoch [485/500], d_loss: 0.9411, g_loss: 1.5307, D(x): 0.65, D(G(z)): 0.27\n", + "Epoch 
[486/500], d_loss: 1.0826, g_loss: 1.3360, D(x): 0.65, D(G(z)): 0.36\n", + "Epoch [487/500], d_loss: 0.8969, g_loss: 1.3325, D(x): 0.68, D(G(z)): 0.32\n", + "Epoch [488/500], d_loss: 0.9951, g_loss: 1.2462, D(x): 0.72, D(G(z)): 0.38\n", + "Epoch [489/500], d_loss: 0.9283, g_loss: 1.3946, D(x): 0.69, D(G(z)): 0.32\n", + "Epoch [490/500], d_loss: 1.1680, g_loss: 1.1607, D(x): 0.68, D(G(z)): 0.42\n", + "Epoch [491/500], d_loss: 1.2781, g_loss: 1.2009, D(x): 0.62, D(G(z)): 0.39\n", + "Epoch [492/500], d_loss: 0.9245, g_loss: 1.6082, D(x): 0.66, D(G(z)): 0.28\n", + "Epoch [493/500], d_loss: 0.8367, g_loss: 1.6735, D(x): 0.69, D(G(z)): 0.27\n", + "Epoch [494/500], d_loss: 1.1558, g_loss: 1.1857, D(x): 0.65, D(G(z)): 0.39\n", + "Epoch [495/500], d_loss: 1.2399, g_loss: 1.2862, D(x): 0.59, D(G(z)): 0.36\n", + "Epoch [496/500], d_loss: 1.0204, g_loss: 1.4843, D(x): 0.68, D(G(z)): 0.33\n", + "Epoch [497/500], d_loss: 1.0776, g_loss: 1.4313, D(x): 0.64, D(G(z)): 0.33\n", + "Epoch [498/500], d_loss: 1.2960, g_loss: 1.1681, D(x): 0.60, D(G(z)): 0.40\n", + "Epoch [499/500], d_loss: 0.7900, g_loss: 1.6450, D(x): 0.72, D(G(z)): 0.27\n" ] } ], @@ -583,63 +704,187 @@ " for i, (images, _) in enumerate(train_loader):\n", " images = images.reshape(BATCH_SIZE, -1).to(DEVICE)\n", " \n", - " # Create the labels which are later used as input for the BCE loss\n", + " # '진짜'와 '가짜' 레이블 생성\n", " real_labels = torch.ones(BATCH_SIZE, 1).to(DEVICE)\n", " fake_labels = torch.zeros(BATCH_SIZE, 1).to(DEVICE)\n", - "\n", - " # Train Discriminator\n", - "\n", - " # Compute BCE_Loss using real images where BCE_Loss(x, y): - y * log(D(x)) - (1-y) * log(1 - D(x))\n", - " # Second term of the loss is always zero since real_labels == 1\n", + " \n", + " # 판별자가 진짜 이미지를 진짜로 인식하는 오차를 예산\n", " outputs = D(images)\n", " d_loss_real = criterion(outputs, real_labels)\n", " real_score = outputs\n", " \n", - " # Compute BCELoss using fake images\n", - " # First term of the loss is always zero since fake_labels == 
0\n", + " # 무작위 텐서로 가짜 이미지 생성\n", " z = torch.randn(BATCH_SIZE, 64).to(DEVICE)\n", " fake_images = G(z)\n", + " \n", + " # 판별자가 가짜 이미지를 가짜로 인식하는 오차를 계산\n", " outputs = D(fake_images)\n", " d_loss_fake = criterion(outputs, fake_labels)\n", " fake_score = outputs\n", " \n", - " # Backprop and optimize\n", + " # 진짜와 가짜 이미지를 갖고 낸 오차를 더해서 판별자의 오차를 계산 후 학습\n", " d_loss = d_loss_real + d_loss_fake\n", " d_optimizer.zero_grad()\n", " d_loss.backward()\n", " d_optimizer.step()\n", " \n", - " # Train Generator\n", - "\n", - " # Compute loss with fake images\n", - " z = torch.randn(BATCH_SIZE, 64).to(DEVICE)\n", + " # 생성자가 판별자를 속였는지에 대한 오차를 계산\n", " fake_images = G(z)\n", " outputs = D(fake_images)\n", - " \n", - " # We train G to maximize log(D(G(z)) instead of minimizing log(1-D(G(z)))\n", - " # For the reason, see the last paragraph of section 3. https://arxiv.org/pdf/1406.2661.pdf\n", " g_loss = criterion(outputs, real_labels)\n", " \n", - " # Backprop and optimize\n", + " # 역전파 알고리즘으로 생성자 모델의 학습을 진행\n", " d_optimizer.zero_grad()\n", " g_optimizer.zero_grad()\n", " g_loss.backward()\n", " g_optimizer.step()\n", " \n", - " if (i+1) % 200 == 0:\n", - " print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' \n", - " .format(epoch, EPOCHS, i+1, total_step, d_loss.item(), g_loss.item(), \n", - " real_score.mean().item(), fake_score.mean().item()))" + " # 학습 진행 알아보기\n", + " print('Epoch [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' \n", + " .format(epoch, EPOCHS, d_loss.item(), g_loss.item(), \n", + " real_score.mean().item(), fake_score.mean().item()))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## 참고\n", - "본 튜토리얼은 다음 자료를 참고하여 만들어졌습니다.\n", - "\n", - "* [yunjey/pytorch-tutorial](https://github.com/yunjey/pytorch-tutorial) - MIT License" + "학습이 끝난 생성자의 결과물을 한번 확인해 보겠습니다." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAEgNJREFUeJzt3WuM1FWax/HfY3NVBLlo00FWdCSaEQSl1VWJusw6KFFwEmP0jWycDPNCkh1jzBr3xZpsTCabnTHzahImGmEdHTdBIokTHdfL4uo6chEEVK5ppbGhpbkIcm+ffdHFpsT+P6fpqq4qPN9PQrq6njpVp6v7x7+qzvmfY+4uAPk5p94dAFAfhB/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwRfiBTg2r5YGbGdEJggLm79eV2FR35zewOM9tkZlvN7PFK7gtAbVl/5/abWZOkzZJul9QuaaWkB9z9k6ANR35ggNXiyH+9pK3uvt3dj0v6k6R5FdwfgBqqJPwTJO0o+769dN13mNkCM1tlZqsqeCwAVTbgH/i5+yJJiyRe9gONpJIj/05JE8u+v7h0HYCzQCXhXylpspldamZDJN0vaXl1ugVgoPX7Zb+7nzSzhZJel9Qk6Vl331i1ngEYUP0e6uvXg/GeHxhwNZnkA+DsRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyVdMtuoFy06dPD+tdXV1hvbOzM6wfO3bsjPuUE478QKYIP5Apwg9kivADmSL8QKYIP5Apwg9kqqJxfjNrk3RQUrekk+7eWo1O4YfDrHjD2HHjxoVtH3300bCear9w4cLC2q5du8K248ePD+vbtm0L69HPLUmDBw8urJ04cSJsW62dtasxyefv3H1PFe4HQA3xsh/IVKXhd0l/MbPVZragGh0CUBuVvuyf6e47zewiSW+Y2WfuvqL8BqX/FPiPAWgwFR353X1n6WunpGWSru/lNovcvZUPA4HG0u/wm9l5Znb+qcuSfippQ7U6BmBgVfKyv1nSstKQxiBJL7j7a1XpFYAB1+/wu/t2SdOq2Bf8AEXj3anz+VP1c86JX7iOHj26sNbd3R22Tc0DuPDCC8P6/v37w3r0vAwfPjxse/jw4bDeVwz1AZki/ECmCD+QKcIPZIrwA5ki/ECmWLobA2rEiBGFtdSpqUePHg3rqVNfn3766cJaalnvV199Nax/+eWXYb29vT2st7S0FNaiIUpJWrJkSWHt+PHjYdtyHPmBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4/yoSGqJ6scee6ywNnXq1LDtqFGjwvq6devCenNzc2Ft9erVYdt77703rKfmCaRO6Y22H69kfsOZLOvNkR/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwxzo+KXHrppWF9xowZhbU9e+LNnXfs2BHWU9tob9q0qbB2//33h223b98e1idNmhTWv/nmm7C+cePGwtq0afGK+LNnzy6svffee2Hbchz5gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IVHKc38yelXSXpE53n1K6boyklyRNktQm6T533zdw3US9NDU1hfVHHnkkrI8dO7aw1tbWFrY9dOhQWE+tbx+Nxe/cuTNsu3nz5rCemmMwZsyYsH7zzTcX1lLbf7///vuFtdT8gnJ9OfI/J+mO0657XNKb7j5Z0pul7wGcRZLhd/cVkvaedvU8SYtLlxdLuqfK/QIwwPr7nr/Z3TtKl3dJKl4vCUBDqnhuv7u7mRUuHGZmCyQtqPRxAFRXf4/8u82
sRZJKXzuLbujui9y91d1b+/lYAAZAf8O/XNL80uX5kl6pTncA1Eoy/Gb2oqT/lXSFmbWb2c8l/VrS7Wa2RdLfl74HcBZJvud39wcKSj+pcl/QgKLxaEm68cYbw/q+fcXTPyZOnBi2HTJkSFjv7u4O6yNHjiys7d17+gDWd6V+rksuuSSsL1u2LKxHcxBOnjzZ77ap+QnlmOEHZIrwA5ki/ECmCD+QKcIPZIrwA5li6e7MpU49nTVrVlhPbXV94MCBwtrQoUMreuxoKE+Kl8dOLTmeOuU3NUzZ2hpPaI1OdV66dGnYdv369YW1b7/9NmxbjiM/kCnCD2SK8AOZIvxApgg/kCnCD2SK8AOZYpz/B+6iiy4K64sXLw7rw4YNC+snTpwI6+3t7YW11Jh0V1dXWD/nnPjYFdWbm+NlJ1NzENauXRvWU3MQIqlTld0LV807Ixz5gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IFOP8ZwEzC+sTJkworN15551h23HjxoX1UaNGhfVdu3aF9WnTphXWvvjii7DtkSNHwvoFF1zQ73pqjkFqq+to+WxJ6ujoCOvROgqpLboHDx5cWEvNuyjHkR/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUxZ6txgM3tW0l2SOt19Sum6JyX9QtJXpZs94e5/Tj6YWXVORP6BOffcc8N6tMa7JF111VWFtZtuuilsmxpLv+uuu8L66NGjw/q2bdsKa01NTWHbaP6CJA0fPjysR2Peqcc+evRoWE/Nbxg0KJ5CM2XKlH63nTt3bmFt3bp1OnToUDwxpKQvR/7nJN3Ry/VPu/v00r9k8AE0lmT43X2FpL016AuAGqrkPf9CM/vYzJ41s/i1H4CG09/w/17SjyRNl9Qh6TdFNzSzBWa2ysxW9fOxAAyAfoXf3Xe7e7e7fyvpD5KuD267yN1b3T3euRBATfUr/GbWUvbtzyRtqE53ANRK8pReM3tR0m2SxplZu6R/kXSbmU2X5JLaJP1yAPsIYAAkw+/uD/Ry9TMD0JcBlRrXTZ3fHa0BnxqXPXnyZFhP7RWfGqu/4oorCmuXXXZZ2Dbaw15Kn9d+8cUXh/VoH/vUuefnn39+WE+tcxC1P3bsWNi2paUlrO/bty+sp/YU6OzsLKyl/l5mz55dWGtrawvblmOGH5Apwg9kivADmSL8QKYIP5Apwg9kquZLd0dDbqllpKPhldRyx6nhsrfeeiusR6euXnvttWHb1DBjarvohx56KKzv2bOnsLZ169aw7W233dbv+5akr7/+OqyPGDGisPbVV18V1qT00t7RMKIkffbZZ4W11BbaK1euDOupJc1Tw3XHjx8vrKWGft95553C2sGDB8O25TjyA5ki/ECmCD+QKcIPZIrwA5ki/ECmCD+QqZqP80enYaa2PZ4+fXphraurK2w7Z86csJ567GgZ6dTpoa2t8SJGqfkN3d3dYT06NTZ1ym3qvqMttqX0PIBom+zUz51y4MCBsB6Nl69YsSJsm1o2fMiQIWE9+luV4jkOqXH+oUOHFtZSpzmX48gPZIrwA5ki/ECmCD+QKcIPZIrwA5ki/ECmajrOP3To0HDcefHixWH7LVu2FNZSWypH50BL6bHVM1kS+XQffPBBWE/1/dZbbw3rw4YNK6ylliRPne+/Y8eOsD5jxoywHo2Hp9Y5WLduXVhPbW0ePa+pcfiOjo6wnlprILVGQ7TF9+7du8O20bbqqd93OY78QKYIP5Apwg9kivADmSL8QKYIP5Apwg9kKjnOb2YTJS2R1CzJJS1y99+Z2RhJL0maJKlN0n3uHu5b3NTUFK5/v3z58rAv1113XWHt6quvDtumzr9OnVserbOeWvP/8OHDYT36uaT4nHhJ2rBhQ2Ft8ODBYdvUePU111wT1ocPHx7Wo/Hsjz76KGwb/a1I0ueffx7Wo22yU7+TyZMnh/XUOgYvvfRSv+8/tSfALbfcUljbvHlz2LZcX478JyU96u4/lvS
3kh42sx9LelzSm+4+WdKbpe8BnCWS4Xf3DndfU7p8UNKnkiZImifp1JS8xZLuGahOAqi+M3rPb2aTJF0j6a+Smt391BzIXep5WwDgLNHn8JvZCElLJf3K3b+zQZu7u3o+D+it3QIzW2Vmq1L7lwGonT6F38wGqyf4f3T3l0tX7zazllK9RVJnb23dfZG7t7p766BBNV8vFECBZPitZznQZyR96u6/LSstlzS/dHm+pFeq3z0AA8V6XrEHNzCbKeldSeslnTpf8An1vO//T0l/I+lz9Qz17Y3uq6mpyaPTMFPLTN9zT/Fnig8++GDYNjUslHpLsmnTpsLaDTfcELbdv39/WE8tnz1+/PiwHv0OP/zww7DtmDFjwvq8efPCeupnGzt2bGFt797wzyVZT/3tRsN50dCtlD5dOFo+W0r/vb399tuFtVmzZoVtX3jhhcLa66+/rq6urj6t3518He7u/yOp6M5+0pcHAdB4mOEHZIrwA5ki/ECmCD+QKcIPZIrwA5lKjvNX9cHMPFquuZLTblPj/KnTJK+88sqwHo3bvvvuu2HbqVOnhvXUMtKpJa7b29sLa6mtplPbXKdmZabmCUTLUK9ZsyZse/DgwbAezSGQpMsvv7ywllqyPJWLuXPnhvWnnnoqrEd/E4cOHQrbPvfcc4W1w4cPq7u7u0/j/Bz5gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IVM3H+RP1Su47rKfGylPPQ3R+d2qL7bvvvjusp7bwTm0fHs1hSC0xvW3btrCe2vI5tbx2V1dXYS21fPaZbDfdm4cffriw9tprr1V03zNnzgzrqXkEUT21VkC0HPrRo0cZ5wcQI/xApgg/kCnCD2SK8AOZIvxApgg/kKmGGufvQ/vCWmq8OTWmnDpvPVpbPzXOn5pjkJL6HVXyO5w0aVJYT60/nxqLj57X1NyM1Nr60RbcKSNHjgzrqXUOUlJrDURzP55//vmw7YkTJ8K6uzPOD6AY4QcyRfiBTBF+IFOEH8gU4QcyRfiBTCXH+c1soqQlkpoluaRF7v47M3tS0i8kfVW66RPu/ufEfdVuUkGVRWPKlY7DDx8+PKwfOXIkrEcqmb8gpc8tT81xqERqHkAt56icqdQchKjvqbap31lfx/njv4weJyU96u5rzOx8SavN7I1S7Wl3//e+PBCAxpIMv7t3SOooXT5oZp9KireBAdDwzug9v5lNknSNpL+WrlpoZh+b2bNm1uv8WjNbYGarzGxVRT0FUFV9nttvZiMk/bekp9z9ZTNrlrRHPZ8D/KukFnd/KHEfjfsmLYH3/L3jPX/vzob3/H068pvZYElLJf3R3V8uPcBud+92928l/UHS9X25LwCNIRl+6/nv9xlJn7r7b8uubym72c8kbah+9wAMlL4M9c2U9K6k9ZJOnb/5hKQHJE1Xz8v+Nkm/LH04GN1X475OA34g+vqy/6w6nx9AGufzAwgRfiBThB/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwRfiBTfVm9t5r2SCrf83lc6bpG1Kh9a9R+SfStv6rZt0v6esOans//vQc3W+XurXXrQKBR+9ao/ZLoW3/Vq2+87AcyRfiBTNU7/Ivq/PiRRu1bo/ZLom/9VZe+1fU9P4D6qfeRH0Cd1CX8ZnaHmW0ys61m9ng9+lDEzNrMbL2Zra33FmOlbdA6zWxD2XVjzOwNM9tS+trrNml16tuTZraz9NytNbM5derbRDN728w+MbONZvaPpevr+twF/arL81bzl/1m1iRps6TbJbVLWinpAXf/pKYdKWBmbZJa3b3uY8JmdoukQ5KWuPuU0nX/Jmmvu/+69B/naHf/pwbp25OSDtV75+bShjIt5TtLS7pH0j+ojs9d0K/7VIfnrR5H/uslbXX37e5+XNKfJM2rQz8anruvkLT3tKvnSVpcurxYPX88NVfQt4bg7h3uvqZ0+aCkUztL1/W5C/pVF/UI/wR
JO8q+b1djbfntkv5iZqvNbEG9O9OL5rKdkXZJaq5nZ3qR3Lm5lk7bWbphnrv+7HhdbXzg930z3f1aSXdKerj08rYhec97tkYarvm9pB+pZxu3Dkm/qWdnSjtLL5X0K3f/urxWz+eul37V5XmrR/h3SppY9v3FpesagrvvLH3tlLRMjbf78O5Tm6SWvnbWuT//r5F2bu5tZ2k1wHPXSDte1yP8KyVNNrNLzWyIpPslLa9DP77HzM4rfRAjMztP0k/VeLsPL5c0v3R5vqRX6tiX72iUnZuLdpZWnZ+7htvx2t1r/k/SHPV84r9N0j/Xow8F/bpM0rrSv4317pukF9XzMvCEej4b+bmksZLelLRF0n9JGtNAffsP9ezm/LF6gtZSp77NVM9L+o8lrS39m1Pv5y7oV12eN2b4AZniAz8gU4QfyBThBzJF+IFMEX4gU4QfyBThBzJF+IFM/R9VmSB3AwAeFgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAEQBJREFUeJzt3V9sVWW6BvDnpZTUlIkUSivQSueMjVEbZExDNGMUM4eJoyQ4N2S4mHASpJjMqJPMxRi9GL04iTk5M3O8OJmkcySDZnTGhDEgITKKJ0GiEWrltEBVwLbQUmmhU2ixBUrfc9HFpGLX+233v7U27/NLmrb72av93PJ07b2/tdYnqgoi8mdO0gMgomSw/EROsfxETrH8RE6x/EROsfxETrH8RE6x/EROsfxETs0t5i8TER5OOIvy8nIzr6mpMfOJiYnY7MqVK+a2ZWVlZn716lUznzdvnplfvHgxNquoqDC3nZqaMvPz58+beS5ExMzTfGSsqtqDj+RUfhF5GMBLAMoA/I+qvpjLz0szqyShgoQsXrzYzJ988kkz/+yzz2Kz06dPm9tWVVWZ+cjIiJnX1dWZeVtbW2zW2Nhobnvp0iUz37Vrl5mH/nhYQn+YxsfHs/7ZaZH1034RKQPw3wB+DOBOABtE5M58DYyICiuX1/yrABxX1S9U9TKAvwBYl59hEVGh5VL+ZQBOzfi+L7rta0SkRUTaRCT++R8RFV3B3/BT1VYArQDf8CNKk1z2/P0A6md8XxfdRkQlIJfyHwTQKCLfFZF5AH4KYGd+hkVEhSa5zFeKyCMA/gvTU31bVfXfA/dP7dP+uXPtV0CTk5MF+90333yzmb/33ntmPjo6GputWLHC3Lanp8fMjxw5Yub19fVmvnTp0tjs008/Nbfdv3+/mb/22mtm3tfXZ+Y3qqLM86vqbgC7c/kZRJQMHt5L5BTLT+QUy0/kFMtP5BTLT+QUy0/kVFHP50+zQs7jh9xxxx1mXl1dbeYHDhyIzUKnnoZOXQ1dSyB0PYC33norNgudyrxmzRozf+ONN8ycbNzzEznF8hM5xfITOcXyEznF8hM5xfITOcWpvhR44YUXzHxwcNDMrSvotra2mttu3rzZzHt7e83cumw4YF/9N3Tp7dDls2+55RYzD52u7B33/EROsfxETrH8RE6x/EROsfxETrH8RE6x/EROcZ4/BUKr/Ibm+U+cOBGbrV271tz25MmTZv7uu++aeVNTk5l3d3fHZg888IC5bUNDg5mHTkcmG/f8RE6x/EROsfxETrH8RE6x/EROsfxETrH8RE7lNM8vIj0ARgFcBTCpqs05/jwzz2U58SSVl5eb+Zw59t/g2267zcwPHToUmy1btszc9sKFC2a+evVqMw8t0W1d+vv48ePmtmNjY2ZeVVVl5mTLx0E+D6nq2Tz8HCIqIj7tJ3Iq1/IrgL+LyMci0pKPARFRceT6tP9+Ve0XkRoA74jIp6q6b+Ydoj8K/MNAlDI57flVtT/6PAjgTQCrZrlPq6o25/pmIBHlV9blF5FKEfnOta8B/AjA4XwNjIgKK5en/bUA3oym5+YCeE1V387LqIio4LIuv6p+AeDuPI6lZOfxQ0Ln64euPz80NJT1725vbzfzBx980Mxvv/12Mw8dozA8PBybNTY2mtuG1gSYN2+emZONU31ETrH8RE6x/EROsfxETrH8RE6x/ERO8dLdRRA6VXnp0qVm3tfXZ+bWVGHodODQ6cZlZWVmfu7cOTO3pjlXrFhhbnv06FEz7+rqMnOycc9P5BTLT+QUy0/kFMtP5BTLT+QUy0/kFMtP5BTn+Ytg8eLFZj4wMGDmoXl+69TWY8eOZb0tE
D4GYdGiRWZuzdXX1taa2956661m/uijj5p5R0eHmXvHPT+RUyw/kVMsP5FTLD+RUyw/kVMsP5FTLD+RUyU1z79p06bY7NVXXzW3vXz5cr6Hk7HJyUkzz3WufWRkJDYLLdG9f/9+M//888/N/LnnnjNz6xiH0CXJQ9cSOHuWi0Pngnt+IqdYfiKnWH4ip1h+IqdYfiKnWH4ip1h+IqeC8/wishXAWgCDqtoU3bYQwF8BNADoAbBeVf9RuGFOs+Z9Q3PpSRobGzPzzs5OM1+yZImZj4+Px2bd3d3mtqGxhebSQ497fX19bLZ8+XJz2ytXrpj5hQsXzJxsmez5/wTg4etuewbAXlVtBLA3+p6ISkiw/Kq6D8DwdTevA7At+nobgMfyPC4iKrBsX/PXquq1a099CcC+HhMRpU7Ox/arqoqIxuUi0gKgJdffQ0T5le2e/4yILAGA6PNg3B1VtVVVm1W1OcvfRUQFkG35dwLYGH29EcCO/AyHiIolWH4ReR3AhwBuF5E+EdkE4EUAa0TkGIB/jb4nohISfM2vqhtioh/meSxBO3bEP8GYmpoq4ki+HdXYt0QAACdPnjTz0PXrreMEmpvtV1uffPKJmff395v5/PnzzfzUqVOx2dWrV81trWMEAODgwYNmTjYe4UfkFMtP5BTLT+QUy0/kFMtP5BTLT+RUSV26+8yZM0kPISuhy4aHpqzWr19v5vfdd19sNjx8/TlZXxe6NPecOfb+IXTabUNDQ2wWelwqKyvNnHLDPT+RUyw/kVMsP5FTLD+RUyw/kVMsP5FTLD+RUyU1z1+qRMTMa2pqzLyrq8vMz507l/XvDs3jX7p0ycxDp+XOnRv/T6ynp8fcNnTJ8tBlx63/9tBp1h5wz0/kFMtP5BTLT+QUy0/kFMtP5BTLT+QUy0/kFOf5i6CiosLMDx8+bOaPP/64mb///vux2d13321uGxrb0NCQmU9MTJi5dRzAvffea24bWoLbWpoc4Fx+CPf8RE6x/EROsfxETrH8RE6x/EROsfxETrH8RE4F5/lFZCuAtQAGVbUpuu15AJsBXJsEflZVdxdqkKXuq6++MvPQMtcLFiww84ULF8Zme/bsMbc9ffq0mYeuy5+Lvr4+Mw8dY1DIsXmQyZ7/TwAenuX236vqyuiDxScqMcHyq+o+APayL0RUcnJ5zf8LEekQka0iUpW3ERFRUWRb/j8A+B6AlQAGAPw27o4i0iIibSLSluXvIqICyKr8qnpGVa+q6hSAPwJYZdy3VVWbVbU520ESUf5lVX4RmXlZ1Z8AsE9LI6LUyWSq73UAqwFUi0gfgN8AWC0iKwEogB4AWwo4RiIqgGD5VXXDLDe/XICxUIyRkREzLy8vj81C1+XPda58cnLSzK1z6i9evGhuW1Vlv488NTVl5mTjEX5ETrH8RE6x/EROsfxETrH8RE6x/ERO8dLdKTA4OGjmoek665Tg0HRa6GeHluC2lgcHgKVLl8ZmlZWV5rb9/f1mHppmJBv3/EROsfxETrH8RE6x/EROsfxETrH8RE6x/EROcZ4/Bay5cCA8Fz93bvz/xtBpsaF5/JDe3l4zX7RoUWwWOr7hpptuMvM0L8EtImaehrFzz0/kFMtP5BTLT+QUy0/kFMtP5BTLT+QUy0/kFOf5UyB0+ezQJaqt+fCysrKsxpSp0PLjTU1NsdnZs2fNbScmJrIaUxpUVFSY+fj4eJFGEo97fiKnWH4ip1h+IqdYfiKnWH4ip1h+IqdYfiKngvP8IlIP4BUAtQAUQKuqviQiCwH8FUADgB4A61X1H4Ub6o1ry5YtZl5XV2fm+/bti81C58zn6oMPPjDzu+66Kzarra01t+3u7jZz6zoGQO7Lj+ciDfP4IZns+ScB/EpV7wRwL4Cfi8idAJ4BsFdVGwHsjb4nohIRLL+qDqhqe/T1KIAuAMsArAOwLbrbNgCPFWqQRJR/3+o1v4g0APg+gI8A1KrqQBR9iemXBURUIjI+tl9E5
gPYDuCXqnph5jXKVFVFZNaLkolIC4CWXAdKRPmV0Z5fRMoxXfw/q+rfopvPiMiSKF8CYNZ3llS1VVWbVbU5HwMmovwIll+md/EvA+hS1d/NiHYC2Bh9vRHAjvwPj4gKJZOn/T8A8DMAnSJyKLrtWQAvAnhDRDYB6AWwvjBDvPHt3r3bzJcvX27mly5dis1Cp5aGThcOCZ0yXF1dHZvt2rXL3PbDDz808ySn8m4EwfKr6n4AcRch/2F+h0NExcIj/IicYvmJnGL5iZxi+YmcYvmJnGL5iZxK1aW7Q3PGuS4nnVYdHR1mXl9fb+b33HNPbPb2229nNaZM1dTUmHlPT09s9tBDD2W9LVAay2CnGff8RE6x/EROsfxETrH8RE6x/EROsfxETrH8RE6lap4/NI9vzeuW8pzuiRMnzHxsbMzMBwYGYrNz585lNaZMdXZ2mvmyZctis9Dy3hcvXjTzUv5/ngbc8xM5xfITOcXyEznF8hM5xfITOcXyEznF8hM5lap5/pAbdV73/PnzZm5dlx+wz6l/4oknzG03b95s5iEHDhww86effjo227t3r7ntRx99lNWYiuFGuJYA9/xETrH8RE6x/EROsfxETrH8RE6x/EROsfxETgXn+UWkHsArAGoBKIBWVX1JRJ4HsBnAUHTXZ1XVXmieZhWax1+wYIGZW8cJtLe3ZzWmTE1MTJj55ORkbDY0NBSbAUBLS4uZP/XUU2ZeSKUwjx+SyUE+kwB+partIvIdAB+LyDtR9ntV/c/CDY+ICiVYflUdADAQfT0qIl0A4i/PQkQl4Vu95heRBgDfB3DtuMtfiEiHiGwVkaqYbVpEpE1E2nIaKRHlVcblF5H5ALYD+KWqXgDwBwDfA7AS088MfjvbdqraqqrNqtqch/ESUZ5kVH4RKcd08f+sqn8DAFU9o6pXVXUKwB8BrCrcMIko34Lll+nTl14G0KWqv5tx+5IZd/sJgMP5Hx4RFUom7/b/AMDPAHSKyKHotmcBbBCRlZie/usBsKUgI3QgNG0UuqT58PBwbFZRUZHVmDI1Ojpq5vPnz8962+3bt2c1JspMJu/27wcw28nLnNMnKmE8wo/IKZafyCmWn8gplp/IKZafyCmWn8ipkrp0940qNM+/Z88eM6+rq4vNQqfc5so6ZRcAdu+OnxGurq42t+3t7c1qTJQZ7vmJnGL5iZxi+YmcYvmJnGL5iZxi+YmcYvmJnJJiXoJYRIYAzJy8rQZwtmgD+HbSOra0jgvg2LKVz7EtV9XFmdyxqOX/xi8XaUvrtf3SOra0jgvg2LKV1Nj4tJ/IKZafyKmky9+a8O+3pHVsaR0XwLFlK5GxJfqan4iSk/Sen4gSkkj5ReRhEflMRI6LyDNJjCGOiPSISKeIHEp6ibFoGbRBETk847aFIvKOiByLPs+6TFpCY3teRPqjx+6QiDyS0NjqReR/ReSoiBwRkaej2xN97IxxJfK4Ff1pv4iUAfgcwBoAfQAOAtigqkeLOpAYItIDoFlVE58TFpEHAIwBeEVVm6Lb/gPAsKq+GP3hrFLVX6dkbM8DGEt65eZoQZklM1eWBvAYgH9Dgo+dMa71SOBxS2LPvwrAcVX9QlUvA/gLgHUJjCP1VHUfgOtX5FgHYFv09TZM/+MpupixpYKqDqhqe/T1KIBrK0sn+tgZ40pEEuVfBuDUjO/7kK4lvxXA30XkYxFpSXows6iNlk0HgC8B1CY5mFkEV24uputWlk7NY5fNitf5xjf8vul+Vb0HwI8B/Dx6eptKOv2aLU3TNRmt3Fwss6ws/U9JPnbZrnidb0mUvx9A/Yzv66LbUkFV+6PPgwDeRPpWHz5zbZHU6PNgwuP5pzSt3DzbytJIwWOXphWvkyj/QQCNIvJdEZkH4KcAdiYwjm8QkcrojRiISCWAHyF9qw/vBLAx+nojgB0JjuVr0rJyc9zK0kj4sUvditeqWvQPAI9g+h3/EwCeS2IMMeP6FwD/F30cSXpsAF7H9
NPAK5h+b2QTgEUA9gI4BuBdAAtTNLZXAXQC6MB00ZYkNLb7Mf2UvgPAoejjkaQfO2NciTxuPMKPyCm+4UfkFMtP5BTLT+QUy0/kFMtP5BTLT+QUy0/kFMtP5NT/A8xInlLcqIdzAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAErlJREFUeJzt3WtslWW2B/D/4lKQtoBcLLVUO9xErQrHhpxEo4MejUcmwUkMQaIyBqfzYUxmkjEeIx+OiV/MiTPjGJMxTCQDJyNgMkPUxBxHFKJDFLmEgyA4gEJogYJykXIrtOt86MucjvZda7tv7y7r/0sa2v3vs/fDhtV37z43UVUQUTyDsu4AEWWDxU8UFIufKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwpqSDkfTEQ4nZCoxFRVcvm+gq78InKfiHwuIntE5OlC7otKY9CgQeaHiJgfnkLbU3Yk37n9IjIYwN8B3AOgDcBGAA+p6mdGG175y2zQIPvnu/fv7+VegXPtSPmV48o/C8AeVf1CVbsArAQwt4D7I6IyKqT4GwAc6PN1W3LbPxGRVhHZJCKbCngsIiqykv/CT1WXAFgC8GU/USUp5MrfDqCxz9cTk9uIaAAopPg3ApgqIj8QkSoA8wG8WZxuEVGp5f2yX1UvisgTAN4BMBjAUlXdUbSeUc7mz5+fmi1fvtxs293dbeYXLlww89raWjPfvXt3ajZt2jSzLZVWQe/5VfVtAG8XqS9EVEac3ksUFIufKCgWP1FQLH6ioFj8REGx+ImCKut6fsrPmDFjzHzp0qWp2fnz5822I0aMMPPTp0+buXf/VvvZs2ebbdeuXWvmVBhe+YmCYvETBcXiJwqKxU8UFIufKCgWP1FQeW/gmdeDBd3Jp6qqyszXr19v5lOmTDHzUaNGpWbeUF1XV5eZV1dXm7n3/+fcuXOpWU1Njdn2o48+MnNvmLKlpcXML1dl2bqbiAYuFj9RUCx+oqBY/ERBsfiJgmLxEwXF4icKikt6i6CpqcnMX3/9dTP3xvEPHz5s5sOHD0/NLl68aLa15ggAwMGDB818/PjxZm45duyYmY8cOdLM6+vrzfy1115LzVpbW822nZ2dZn454JWfKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwqqoHF+EdkH4BSAbgAXVTXkAuq7777bzI8fP27m+/fvN/O6ujozt9bsnzx50mzrbb3d09OT92MDwJkzZ1KzoUOHmm1Xrlxp5nPmzDHzK664IjX75JNPzLY33HCDmV8OijHJZ7aqflWE+yGiMuLLfqKgCi1+BfBXEdksIvZ8SSKqKIW+7L9dVdtF5CoA74rILlX9oO83JD8U+IOBqMIUdOVX1fbkzyMAVgOY1c/3LFHVlqi/DCSqVHkXv4hUi0jtpc8B3Atge7E6RkSlVcjL/joAq0Xk0v28pqr/U5ReEVHJcd/+HE2YMCE127Jli9m2u7vbzAtZEw8A+/btS80mT55strX21QeAVatWmfm8efPMvJA5CF7frHF8AJg4cWJqNmSIfd3zxvn37t1r5lnivv1EZGLxEwXF4icKisVPFBSLnygoFj9RUNy6O0djx45NzY4cOWK29ZbkeltYe0OFtbW1qdnXX39tth00yP75b/29Ab9v1v17y4WnTZtW0GN3dHSkZg0NDWbb5557zswXLFhg5gMBr/xEQbH4iYJi8RMFxeInCorFTxQUi58oKBY/UVAc58/R448/npp5y2arq6vN3DsO2jsme/To0alZst9Cql27dpn5N998Y+ZVVVVmvmbNmtTMWiYN+PMjLly4YOZdXV2pmXd0+S233GLmlwNe+YmCYvETBcXiJ
wqKxU8UFIufKCgWP1FQLH6ioDjOn6OZM2emZl988YXZ1juKetSoUWZujeN7979582az7Y033mjm77//vpl78wis48u99fjePgnWOD4AXH311anZ2bNnzbYjR44088sBr/xEQbH4iYJi8RMFxeInCorFTxQUi58oKBY/UVDuEd0ishTAjwAcUdXm5LYxAFYBaAKwD8A8VT3uPtgAPqLbOobb2+P9lVdeMXPvqGlv7/1x48alZt66dWvPfwDYvn27mXv731tj+d4R3I2NjWbuHfF96tSp1Gz48OFmWy/35mZkqZhHdP8RwH3fuu1pAO+p6lQA7yVfE9EA4ha/qn4A4NtHyswFsCz5fBmAB4rcLyIqsXzf89ep6qHk88MA7P2WiKjiFDy3X1XVei8vIq0AWgt9HCIqrnyv/B0iUg8AyZ+pKzBUdYmqtqhqS56PRUQlkG/xvwlgYfL5QgBvFKc7RFQubvGLyAoAHwG4TkTaRGQRgOcB3CMiuwH8W/I1EQ0g7nt+VX0oJUpfqH0ZampqSs2eeuops603ln78uD1Fwts731p7XlNTY7b11rV7a+rPnz9v5tb+92+99ZbZ1tvHYMOGDWZuzQO49957zbYjRoww88sBZ/gRBcXiJwqKxU8UFIufKCgWP1FQLH6ioLh1d46GDEl/qqZNm2a23bt3r5lfe+21Zu5tI20tXW1vb8+7LeAPU1rbY3vuuusuM/eO4PaWG1vPq7dtuLct+OWAV36ioFj8REGx+ImCYvETBcXiJwqKxU8UFIufKCh36+6iPlgFb93tLX3t6OhIzbwxYW9rbm/Jbltbm5lPmDAhNfP+fY8ePWrm3vba3jwA6+82duxYs603v8Fbjrxu3brUzDua3DqSHfCPXc9SMbfuJqLLEIufKCgWP1FQLH6ioFj8REGx+ImCYvETBcVx/oQ3bmsd0e2N41tHaAP+Edxnzpwx82PHvn2O6v+bNGmS2ba+vt7MT58+bebemntr3bw3Tu/lBw4cMHNrPb81NwLwt+72/s29o9FLieP8RGRi8RMFxeInCorFTxQUi58oKBY/UVAsfqKg3H37RWQpgB8BOKKqzcltzwL4KYBLi8GfUdW3S9XJcvDWjk+fPj0189a8e2PC3ji+Nxejubk5Nauuri7ovr05CN5eBo2NjamZd7y3929SVVVl5tbz7j0v3vwGb15IluP8ucrlyv9HAPf1c/tvVXVG8jGgC58oIrf4VfUDAOlTyIhoQCrkPf8TIrJNRJaKyJVF6xERlUW+xf97AJMBzABwCMCv075RRFpFZJOIbMrzsYioBPIqflXtUNVuVe0B8AcAs4zvXaKqLarakm8niaj48ip+Eem7FOzHAOzjUomo4uQy1LcCwA8BjBORNgD/CeCHIjIDgALYB+BnJewjEZWAW/yq+lA/N79agr5k6rHHHjNzazzcG28+ceKEmR88eNDMr7vuOjM/dOhQauatt/fG0js7O8184sSJZm6tyffG2r2x9KuuusrM9+7dm5o1NDSYbUXsJfFz584185UrV5p5JeAMP6KgWPxEQbH4iYJi8RMFxeInCorFTxQUt+5OXHPNNWa+bdu21GzQIPtnqDds5A1pnTx50sytY7CHDx9uth09erSZe8tuvWHODz/8MDWzliIDhT9v1hDqjBkzzLbWluMAMHXqVDPfv3+/mZcSt+4mIhOLnygoFj9RUCx+oqBY/ERBsfiJgmLxEwXlLumNYvXq1WZeW1ubmvX09JhtvWW1Xu7NI7DGpK1+5/LYGzduNHNvWe2UKVNSM29Lc28c35uDYM3d8LbW9rZT9+ZPDAS88hMFxeInCorFTxQUi58oKBY/UVAsfqKgWPxEQXGcP+EdRW2NtXtjxkOG2E+ztcV0Lvdvbe3tPfbRo0fN3Fvv743zW0d4Dxs2zGzrzUEYN26cmXd0dKRm3hwCb5+CRx991MwXL15s5pWAV36ioFj8REGx+ImCYvETBcXiJwqKxU8UFIufKCh3n
F9EGgEsB1AHQAEsUdXficgYAKsANAHYB2Ceqh4vXVcL4+0Bb+19DwDHjh1Lzbyjpr2zEZqamszcW7duHaNdU1NjtvXGu73cm4NgzQM4d+6c2XbUqFFmfurUKTO3/l28fnvj/JMnTzbzgSCXK/9FAL9S1RsA/CuAn4vIDQCeBvCeqk4F8F7yNRENEG7xq+ohVd2SfH4KwE4ADQDmAliWfNsyAA+UqpNEVHzf6z2/iDQBmAlgA4A6VT2URIfR+7aAiAaInOf2i0gNgD8D+KWqftP3PbSqato5fCLSCqC10I4SUXHldOUXkaHoLfw/qepfkps7RKQ+yesBHOmvraouUdUWVW0pRoeJqDjc4pfeS/yrAHaq6m/6RG8CWJh8vhDAG8XvHhGVSi4v+28D8AiAT0Vka3LbMwCeB/C6iCwCsB/AvNJ0sTi84bYXXnjBzGfOnJmaHThwwGx70003mXlbW5uZF3qMtsUbppw0aZKZ79q1y8ytLa5HjBhhtv3888/N3HterGXYp0+fLuix161bZ+YDgVv8qvo3AGmD5HcXtztEVC6c4UcUFIufKCgWP1FQLH6ioFj8REGx+ImCCrN1t7c09bbbbjNza0z5yiuvNNt64/DeEdze9trWFtbe1t3estgdO3aYeUuLPXHTWkp99uxZs+2JEyfMvK7OXk4ycuTI1OzkyZNm2+bmZjN/6aWXzHwg4JWfKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwpKvHXuRX2wlK2+ysEb7/bGq9euXZuaffXVV2bb8ePHm/nOnTvNvLGx0cyt8Wxry3EA6OnpMfP29nYzP3z4sJlff/31qdmYMWPMtl7fvfkV1twObx8D7+/l5bfeequZl5Kq2vvUJ3jlJwqKxU8UFIufKCgWP1FQLH6ioFj8REGx+ImCCrOe3xvPfuSRR8zcWpdurafPhbeufdiwYWa+fv361Gz69Olm24MHD5r5l19+aeZjx441c+u52bx5s9nWOw9hwYIFZm7NE/COLq+vrzfzrq4uMx8IeOUnCorFTxQUi58oKBY/UVAsfqKgWPxEQbH4iYJyx/lFpBHAcgB1ABTAElX9nYg8C+CnAC5tKv+Mqr5dqo4Wytu3wFszb7Wvqqoy2+7Zs8fMW1tbzbyhocHMn3zyydTMGyvfunWrmd9xxx1m/vDDD5v5/PnzU7OPP/7YbDtnzhwzf/nll838wQcfNHOLNa8DAFasWJH3fVeKXCb5XATwK1XdIiK1ADaLyLtJ9ltVfaF03SOiUnGLX1UPATiUfH5KRHYCsC9FRFTxvtd7fhFpAjATwIbkpidEZJuILBWRfvdUEpFWEdkkIpsK6ikRFVXOxS8iNQD+DOCXqvoNgN8DmAxgBnpfGfy6v3aqukRVW1TV3iSPiMoqp+IXkaHoLfw/qepfAEBVO1S1W1V7APwBwKzSdZOIis0tfun9teerAHaq6m/63N532dOPAWwvfveIqFTcrbtF5HYAHwL4FMCldbHPAHgIvS/5FcA+AD9Lfjlo3VdmW3d7R3Rv2mT/SuLmm29OzS5evGi2XbZsmZl7w0adnZ1mbh2j7W1vfeedd5r5O++8Y+be/VvHbHtDpB0dHWbe3d1t5mvWrEnNvCFMz6xZ9gvdLVu2FHT/hch16+5cftv/NwD93VnFjukTkY8z/IiCYvETBcXiJwqKxU8UFIufKCgWP1FQYY7o9gwePNjMZ8+enZo1NzebbV988cW8+kSFseYwLF682Gy7aNEiM/eOLve2ii8lHtFNRCYWP1FQLH6ioFj8REGx+ImCYvETBcXiJwqq3OP8RwHs73PTOABfla0D30+l9q1S+wWwb/kqZt+uVdXxuXxjWYv/Ow8usqlS9/ar1L5Var8A9i1fWfWNL/uJgmLxEwWVdfEvyfjxLZXat0rtF8C+5SuTvmX6np+IspP1lZ+IMpJJ8YvIfSLyuYjsEZGns+hDGhHZJyKfisjWrI8YS45BOyIi2/vcNkZE3hWR3
cmf9t7Z5e3bsyLSnjx3W0Xk/oz61igia0XkMxHZISK/SG7P9Lkz+pXJ81b2l/0iMhjA3wHcA6ANwEYAD6nqZ2XtSAoR2QegRVUzHxMWkTsAdAJYrqrNyW3/BeCYqj6f/OC8UlX/o0L69iyAzqxPbk4OlKnve7I0gAcA/AQZPndGv+Yhg+ctiyv/LAB7VPULVe0CsBLA3Az6UfFU9QMAx75181wAl04BWYbe/zxll9K3iqCqh1R1S/L5KQCXTpbO9Lkz+pWJLIq/AcCBPl+3obKO/FYAfxWRzSLSmnVn+lHX52SkwwDqsuxMP9yTm8vpWydLV8xzl8+J18XGX/h91+2q+i8A/h3Az5OXtxVJe9+zVdJwTU4nN5dLPydL/0OWz12+J14XWxbF3w6gsc/XE5PbKoKqtid/HgGwGpV3+nDHpUNSkz+PZNyff6ikk5v7O1kaFfDcVdKJ11kU/0YAU0XkByJSBWA+gDcz6Md3iEh18osYiEg1gHtReacPvwlgYfL5QgBvZNiXf1IpJzennSyNjJ+7ijvxWlXL/gHgfvT+xn8vgMVZ9CGlX5MA/G/ysSPrvgFYgd6XgRfQ+7uRRQDGAngPwG4AawCMqaC+/Td6T3Peht5Cq8+ob7ej9yX9NgBbk4/7s37ujH5l8rxxhh9RUPyFH1FQLH6ioFj8REGx+ImCYvETBcXiJwqKxU8UFIufKKj/A/D6Ugih7tOkAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAErhJREFUeJzt3W2MleWZB/D/JYwIw9sMyDAOI1MIbpigjDriKrjpprZSbfDlg6kxEU1TGlOTNumHGvbD+mUTs27b9WVtQldSNGq7SUH8YLCsWYM1awMSVlBUXgUG5oXXAWZkgLn2wzw0o85zXcfz9jzj9f8lhJnzP/d5bg5zzXPOuZ/7vkVVQUTxXJZ1B4goGyx+oqBY/ERBsfiJgmLxEwXF4icKisVPFBSLnygoFj9RUGOreTAR4eWERaipqTHzlpaW1GxgYMBsO2HCBDMfM2aMmY8da/8I9ff3p2a7du0y2w4ODpo5jUxVpZD7SSmX94rIUgBPAxgD4D9V9Unn/iz+IjQ2Npr56tWrU7PDhw+bbRcuXGjmdXV1Zl5fX2/mO3bsSM3uuusus21vb6+Z08gKLf6iX/aLyBgA/wHg+wBaATwgIq3FPh4RVVcp7/kXAditqntVdQDAHwDcXZ5uEVGllVL8TQAODvv+UHLbF4jIChHZIiJbSjgWEZVZxT/wU9VVAFYBfM9PlCelnPk7ADQP+35WchsRjQKlFP9mAPNE5FsicjmAHwJ4vTzdIqJKK3Wo704A/46hob7Vqvovzv35sn8El11m/w5ev369mVtDZp9//rnZdvz48WZ+8eJFM/euA7DG6t955x2z7dq1a838mWeeMfOoCh3qK+k9v6q+AeCNUh6DiLLBy3uJgmLxEwXF4icKisVPFBSLnygoFj9RUFWdz08jmzRpkpnfdtttZn7u3LnUbN26dWbbpqavTMf4gnfffdfMRewh5blz56Zmy5YtM9sePHjQzKk0PPMTBcXiJwqKxU8UFIufKCgWP1FQLH6ioDjUlwNnz541c2847YorrkjNli5darZ95ZVXzPzhhx8286lTp5r5xx9/bOYWawgT8Jcd7+vrK/rYEfDMTxQUi58oKBY/UVAsfqKgWPxEQbH4iYJi8RMFVdLS3V/7YEGX7h43bpyZ33zzzWb+5ptvmnlPT09qduzYMbPtggULzPz06dNm7i3dbS1L7o3Tb9y40cy9pb2t3YsvXLhgth3NKr5LLxGNbix+oqBY/ERBsfiJgmLxEwXF4icKisVPFFSpW3TvB3AawEUAF1S13bn/N3Kc/9lnnzXztrY2M7/22mvN3JqvDwADAwOp2cmTJ822Y8faSzrMmDHDzPv7+828pqYmNfO2D/fy2tpaM9+yZUtq9uijj5ptS1mHIGtV2aI78Y+qerQMj0NEVcSX/URBlVr8CuDPIvK+iKwoR4eIqDpKfdm/RFU7RGQGgI0i8rGqbhp+h+SXAn8xEOVMSWd+Ve1I/u4GsA7AohHus0pV270PA4mouooufhGpFZFJl74G8D0AO8rVMSKqrFJe9jcAWJcsKz0WwCuquqEsvSKiiuN8/jLwxrq9dffPnz9v5t5YfClz00vtu7e9+KFDh1Iz6xoAAGhsbDRza60AwL7Gwft3Nzc3m3mecT4/EZlY/ERBsfiJgmLxEwXF4icKisVPFBS36C6Qtcz08ePHzbbekFR3d7eZ79u3z8yvu+661GzHDvu6q1tuucXM9+zZY+Y33HCDmY8fPz4127p1q9n2wIEDZu4teW4NkXpLjldyeDUveOYnCorFTxQUi58oKBY/UVAsfqKgWPxEQbH4iYLiOH+BrHFfbwnprq4uM584caKZe1NfrXzJk
iVmW2+8e8qUKWY+ODho5tby27feeqvZtrOz08y9sfjJkyenZt626VdeeaWZHzlyxMxHA575iYJi8RMFxeInCorFTxQUi58oKBY/UVAsfqKgOM5foE2bNqVm1px1AKivrzfzo0ftTY6bmprM3Bqzfvvtt822s2fPNvNjx46Z+bx588zc2uLb23rcG8cvZXtwbz6+95xznJ+IRi0WP1FQLH6ioFj8REGx+ImCYvETBcXiJwrK3aJbRFYD+AGAblVdkNxWD+CPAFoA7Adwv6qecA82irfotsbin3/+ebPtypUrzdwbcz5z5oyZd3R0pGbefPy6ujoz98azvbF46+fLu/6hr6/PzL32l19+eWrm7WfgPW9z5swx8yyVc4vu3wNY+qXbHgfwlqrOA/BW8j0RjSJu8avqJgBf3pLmbgBrkq/XALinzP0iogor9j1/g6peej3YCaChTP0hoiop+dp+VVXrvbyIrACwotTjEFF5FXvm7xKRRgBI/k7daVJVV6lqu6q2F3ksIqqAYov/dQDLk6+XA1hfnu4QUbW4xS8irwL4XwB/JyKHRORHAJ4E8F0R2QXg9uR7IhpF3Pf8qvpASvSdMvcl16w14G+66Sazrbc2vog9LHvZZfbv6AkTJqRm3nx8a749AOzdu9fMp0+fbubWePjJkyfNtr29vWbu7Xdg7SngrWNgPaffFLzCjygoFj9RUCx+oqBY/ERBsfiJgmLxEwXFpbsLZA3HeUtvFzBt2swnTZpk5ps3b07NWltbzbYXL14087a2NjP3pvRaU2Ot4VPAn+p84oQ9i3zmzJmpmbV1OGA/p98UPPMTBcXiJwqKxU8UFIufKCgWP1FQLH6ioFj8REFxnL8MFi9eXFL78+fPm7k33j1t2rTUzLvGwNsm2zt2bW2tmff09KRm3r/bWnob8KcjW1OhDxw4YLa95pprzPybgGd+oqBY/ERBsfiJgmLxEwXF4icKisVPFBSLnygojvMXyFp+21veurm52czPnj1b9LEBoL+/PzXz5ut71wFYy18DwKlTp8zcGssvZeltwN8+fNasWanZVVddZbb1rn/w1mDwntc84JmfKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwrKHecXkdUAfgCgW1UXJLc9AeDHAC5N1l6pqm9UqpPVUMo22t54s7e2vZd7Y/XWVtfeuvveY3d0dJi5N5+/vr6+6Lbnzp0z887OTjNvampKzbxrDLxx/HHjxpm5ty9AHhRy5v89gKUj3P4bVW1L/ozqwieKyC1+Vd0E4HgV+kJEVVTKe/7HROQDEVktInVl6xERVUWxxf9bAHMBtAE4AuBXaXcUkRUiskVEthR5LCKqgKKKX1W7VPWiqg4C+B2ARcZ9V6lqu6q2F9tJIiq/oopfRBqHfXsvgB3l6Q4RVUshQ32vAvg2gOkicgjAPwP4toi0AVAA+wH8pIJ9JKIKcItfVR8Y4eYXKtCXTHnzty01NTVm7o1X796928xbW1vNfO7cuWZusda2B4C6OvuzXG/t/KNHj6Zm3rr83lh5qXsOWAYGBsx86tSpZu5dg5AHvMKPKCgWP1FQLH6ioFj8REGx+ImCYvETBcWluxPedtHWcJ03NdWbHjp//nwz91hTehsaGsy23pCWN3XVYw0lekN53v/JlClTzNxaPtubwu1N+T1x4oSZjwY88xMFxeInCorFTxQUi58oKBY/UVAsfqKgWPxEQXGcPzFz5kwzt5a4njNnjtnWG0v3jB8/3swbGxtTM2+82pv2unPnTjP3prZOnz49NfOWDfdy7xoEa8qw99iHDx8u6djeNO484JmfKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwqK4/yJhx56yMytcd2rr7666LYA0NfXZ+b9/f1mPmvWrNTMmusP+Mtfe+sBeH3/9NNPUzNvHQNrPj4ATJgwwcyt9QC86xu858Vba2A04JmfKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwpKv
LFUEWkG8CKABgAKYJWqPi0i9QD+CKAFwH4A96uquZi5iNgHy5C3zfamTZtSs7a2NrPt2bNnzdzbqnrDhg1mvmjRotRs2rRpZltvPHv79u1m7m0Pbj2v3px67/qG/fv3m/n111+fmpV6/YP37+7u7jbzSlJVe6OIRCFn/gsAfqGqrQD+HsBPRaQVwOMA3lLVeQDeSr4nolHCLX5VPaKqW5OvTwPYCaAJwN0A1iR3WwPgnkp1kojK72u95xeRFgDXA/grgAZVPZJEnRh6W0BEo0TB1/aLyEQAfwLwc1XtHb7/nKpq2vt5EVkBYEWpHSWi8irozC8iNRgq/JdVdW1yc5eINCZ5I4ARP+FQ1VWq2q6q7eXoMBGVh1v8MnSKfwHATlX99bDodQDLk6+XA1hf/u4RUaUUMtS3BMA7ALYDGExuXomh9/3/BeBqAJ9haKjvuPNYuR3qa2lpMfNPPvkkNfOW5vaG8np7e83cmz5qbYPtDadZy34D/tTXrq4uM58xY0Zq5m1dvnfvXjP32lv/p9724N7/yY033mjmo2Goz33Pr6p/AZD2YN/5Op0iovzgFX5EQbH4iYJi8RMFxeInCorFTxQUi58oKC7dnbjjjjvM3Brv9qZ/vvfee2be2tpq5nv27DFz6/jeFtqnT5828zNnzpi5dY0BYI93e2Ptx44dM/Pdu3ebuTXt1tv23HvOvWXDRwOe+YmCYvETBcXiJwqKxU8UFIufKCgWP1FQLH6ioNz5/GU9WI7n80+ePNnMT5xIX5Xcm1fe2dlp5t58f28J64kTJ6ZmY8aMKbotABw9etTMPS+//HJq9thjj5ltBwcHzdz72bXWQfD+3S+99JKZP/LII2buraNQSeVcupuIvoFY/ERBsfiJgmLxEwXF4icKisVPFBSLnygozudPeNtsW1s619fXm20/++wzM58/f76Ze9uH79y5s+jH9ubUnzp1ysy9PQvuu+++oh/7ueeeM/MHH3zQzK3rJ7z5/N7W5k1NTWZ+4MABM88DnvmJgmLxEwXF4icKisVPFBSLnygoFj9RUCx+oqDc+fwi0gzgRQANABTAKlV9WkSeAPBjAD3JXVeq6hvOY+V2Pv/s2bPN/KOPPkrNxo0bZ7Y9ePCgmXvz+b3xbmte/LZt28y2ixcvNvPa2loz7+joMHNrPYEpU6aYbY8fP27mW7duNfNly5alZt5+A+fOnTPze++918w3bNhg5pVU6Hz+Qi7yuQDgF6q6VUQmAXhfRDYm2W9U9d+K7SQRZcctflU9AuBI8vVpEdkJwL68iYhy72u95xeRFgDXA/hrctNjIvKBiKwWkbqUNitEZIuIbCmpp0RUVgUXv4hMBPAnAD9X1V4AvwUwF0Abhl4Z/Gqkdqq6SlXbVbW9DP0lojIpqPhFpAZDhf+yqq4FAFXtUtWLqjoI4HcAFlWum0RUbm7xy9DStC8A2Kmqvx52e+Owu90LYEf5u0dElVLIUN8SAO8A2A7g0lrKKwE8gKGX/ApgP4CfJB8OWo+V26G+sWPtzz57enpSM2/IylvG2Tu2t3T3vn37UrOWlhazrTdd2Fv6u6+vz8xfe+211Oz2228v6bG94VlrOM/7ube2ZAf8bdmruST+CMcuz1Cfqv4FwEgPZo7pE1G+8Qo/oqBY/ERBsfiJgmLxEwXF4icKisVPFBSX7k5447K7du1KzRYuXGi29baDtraSBvwtwK2+e+P03jUG3tRWbwlsb2lwS6lj5R9++GFqNnXqVLPtU089VdKxRwOe+YmCYvETBcXiJwqKxU8UFIufKCgWP1FQLH6ioNz5/GU9mEgPgOH7VU8HcLRqHfh68tq3vPYLYN+KVc6+zVbVKwu5Y1WL/ysHF9mS17X98tq3vPYLYN+KlVXf+LKfKCgWP1FQWRf/qoyPb8lr3/LaL4B9K1Ymfcv0PT8RZSfrMz8RZSST4heRpSLyiYjsFpHHs+hDGhHZLyLbRWRb1luMJdugdYvIjmG31YvIRhHZl
fw94jZpGfXtCRHpSJ67bSJyZ0Z9axaR/xGRj0TkQxH5WXJ7ps+d0a9Mnreqv+wXkTEAPgXwXQCHAGwG8ICqpu+BXUUish9Au6pmPiYsIv8A4AyAF1V1QXLbvwI4rqpPJr8461T1lznp2xMAzmS9c3OyoUzj8J2lAdwD4GFk+NwZ/bofGTxvWZz5FwHYrap7VXUAwB8A3J1BP3JPVTcB+PIm9XcDWJN8vQZDPzxVl9K3XFDVI6q6Nfn6NIBLO0tn+twZ/cpEFsXfBODgsO8PIV9bfiuAP4vI+yKyIuvOjKBh2M5InQAasuzMCNydm6vpSztL5+a5K2bH63LjB35ftURVbwDwfQA/TV7e5pIOvWfL03BNQTs3V8sIO0v/TZbPXbE7XpdbFsXfAaB52PezkttyQVU7kr+7AaxD/nYf7rq0SWryd3fG/fmbPO3cPNLO0sjBc5enHa+zKP7NAOaJyLdE5HIAPwTwegb9+AoRqU0+iIGI1AL4HvK3+/DrAJYnXy8HsD7DvnxBXnZuTttZGhk/d7nb8VpVq/4HwJ0Y+sR/D4B/yqIPKf2aA+D/kj8fZt03AK9i6GXgeQx9NvIjANMAvAVgF4D/BlCfo769hKHdnD/AUKE1ZtS3JRh6Sf8BgG3Jnzuzfu6MfmXyvPEKP6Kg+IEfUVAsfqKgWPxEQbH4iYJi8RMFxeInCorFTxQUi58oqP8HxIdERVvgsgsAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAEp1JREFUeJzt3XtsVWW6BvDnFVpu5SK3gp0e0NKcKBc7x0YxBxHRMYoXnMSghoyYGCBmJlEzfxzj8ZaYk+iJo/LHiaZzMFMncxiPMgoqHvCOJNpQCDeHkYuCtZSWcpGb5VLe80cXk452vd9mr7332uV9fomh3c/+ur9sedh791trfaKqICJ/Lkh7AkSUDpafyCmWn8gplp/IKZafyCmWn8gplp/IKZafyCmWn8ipvoV8MBHh4YRZuOAC+9/oQYMGxWYdHR3m2M7OzqzmdFboCNFRo0bFZu3t7ebYkpISMz916pSZnzlzxszPV6oqmdwvUflF5CYAiwD0AfDfqvpMkp9HPSsrKzPzK664Ijbbtm2bOfbgwYNZzemsUAHvuuuu2Ky+vt4cO3r0aDNvaWkx8+PHj8dmPKw9wdt+EekD4L8A3AzgMgD3iMhluZoYEeVXks/8VwLYoapfq+pJAH8GMDs30yKifEtS/goATd2+/y667R+IyAIRaRSRxgSPRUQ5lvdf+KlqHYA6gL/wIyomSV75mwFUdvv+Z9FtRNQLJCn/WgDVInKxiJQCuBvA8txMi4jyTZIseYjILAAvomup7xVV/Y/A/fm2vwf9+/c38wcffNDM58yZE5tt3LjRHDtv3jwzb2trM/MxY8aY+bPPPhub1dTUmGOnTJli5s8//7yZP/fcc2aehIi9lJ7mUmJB1vlVdQWAFUl+BhGlg4f3EjnF8hM5xfITOcXyEznF8hM5xfITOZVonf+cH4zr/D0KrXd/9tlnZr53797YbOfOnebYcePGmfmxY8fM3LqWAABMmDAhNgutlb/zzjtm/thjj5n5li1bzPx8lek6P1/5iZxi+YmcYvmJnGL5iZxi+YmcYvmJnCropbupZ4cOHTLzAQMGmLl1eeyhQ4eaYxsaGsw8NLfQKb+33357bHbxxRebY6+++mozD82NbHzlJ3KK5SdyiuUncorlJ3KK5SdyiuUncorlJ3KK6/xFIHSJ6tAW3fv374/NRo4caY6dOXOmmYdOi7377rvNfPfu3bHZ0qVLzbH9+vUz8++//97MycZXfiKnWH4ip1h+IqdYfiKnWH4ip1h+IqdYfiKnkm7RvQvAEQCdAE6ram3g/i4v3V1aWmrm7733npnPmDHDzN9+++3YbPbs2ebY06dPm3noGIPQ5bc///zz2Cx0fEPofP0vv/zSzG+++ebYLM0ttPOtIFt0R65T1fYc/BwiKiC+7SdyKmn5FcAqEVknIgtyMSEiKoykb/unqWqziIwG8L6I/E1VV3e/Q/SPAv9hICoyiV75VbU5+rMNwJsAruzhPnWqWhv6ZSARFVbW5ReRQSIy+OzXAG4E4HNnRKJeKMnb/nIAb0ZLPX0B/I+q/l9OZkVEeZd1+VX1awCX53Au562+fe2nObSWvnnzZjM/efJkbLZ161Zz7JgxY8w8NL6iosLM+/TpE5sdOHDAHBs6hsD62QBQWVkZm3377bfmWA+41EfkFMtP5BTLT+QUy0/kFMtP5BTLT+QUL91dACNGjDDzl19+2cxffPFFM7dOfe3o6DDHLlu2zMyHDRtm5qHTcrdt2xabhU51ti77DQBHjx4189ASqmXw4MGJHrs3nDLMV34ip1h+IqdYfiKnWH4ip1h+IqdYfiKnWH4ipxJduvucH8zppbtDp56+9dZbZ
l5VVWXm1nr2kCFDsh4LADU1NWa+Z88eM7dOy/3mm2/MsU1NTWa+ePFiM3/ttddisxMnTphjQ+68804zf+ONNxL9/CQyvXQ3X/mJnGL5iZxi+YmcYvmJnGL5iZxi+YmcYvmJnOL5/AXQ2dlp5pdfbl8B/ciRI2be0NAQm02cONEce+bMGTMPzf3w4cNm3r9//9jMWocHgHvvvdfMR40aZeZJ1/Itaa7j5wpf+YmcYvmJnGL5iZxi+YmcYvmJnGL5iZxi+YmcCq7zi8grAG4F0Kaqk6LbhgN4DcB4ALsAzFHVg/mb5vlt48aNZj5u3Dgzb25ujs1C192fPHmymYe20bbW8QGgpKQkNrPmDQCDBg0y8xtuuMHMX3jhhdgsdHxD6BoMoeMfeoNMXvn/AOCmH932CIAPVbUawIfR90TUiwTLr6qrAfz4n//ZAOqjr+sB3JHjeRFRnmX7mb9cVVuir/cCKM/RfIioQBIf26+qal2bT0QWAFiQ9HGIKLeyfeVvFZGxABD92RZ3R1WtU9VaVa3N8rGIKA+yLf9yAPOir+cBsLd6JaKiEyy/iCwB8DmAfxaR70TkfgDPAPiFiGwHcEP0PRH1IsHP/Kp6T0x0fY7n4lZtrf2JyFqvBoCHH344Njt16pQ5trGx0cxD+zrs3bvXzCsqKmKz66+3/wqF5rZmzRozD63lW8aPH2/mO3fuzPpnFwse4UfkFMtP5BTLT+QUy0/kFMtP5BTLT+QUL90dKS0tNfOTJ0/m7bG3b99u5q2trWZubbPd1hZ78CUAYOnSpWYeWi5buHChmdfV1cVmoctf33fffWb+7rvvmnkSSZfyBg4caObHjx9P9PNzga/8RE6x/EROsfxETrH8RE6x/EROsfxETrH8RE5xnT+Sz3X8kC+++MLMp0+fbubWWv769evNsa+//rqZh2zYsMHMR48eHZuVlZWZY0OX7q6urjbz0NwsImLmoeNCimEdP4Sv/EROsfxETrH8RE6x/EROsfxETrH8RE6x/EROcZ2/CBw+fNjM586da+b19fWxWWgb7Ntuu83MQ+f7jx071sytra5D1wrYsWOHmR89etTMkwit8/fr18/Mp06dauaffvrpOc8p1/jKT+QUy0/kFMtP5BTLT+QUy0/kFMtP5BTLT+RUcJ1fRF4BcCuANlWdFN32FID5APZFd3tUVVfka5KZGDx4sJkfOXLEzC+88EIzP3jw4DnPKVNTpkwx8z179pj5pEmTYrNrrrnGHLty5UozDwk9L01NTbHZxIkTzbEXXXSRmXd0dJh5EqGtyUPHGGzbts3MreMETpw4YY7NlUxe+f8A4KYebn9BVWui/1ItPhGdu2D5VXU1gAMFmAsRFVCSz/y/EZFNIvKKiNjvmYmo6GRb/pcAVAGoAdAC4HdxdxSRBSLSKCKNWT4WEeVBVuVX1VZV7VTVMwB+D+BK4751qlqrqrXZTpKIci+r8otI91O5fglgS26mQ0SFkslS3xIAMwCMFJHvADwJYIaI1ABQALsA2Ps0E1HRCZZfVe/p4ebFeZhLIiUlJYnGh86pz6cLLrDfgI0YMcLMrT0HTp06ZY5NevxCe3u7mR87diw2Cx2bEboWQU1NjZl//PHHZm4JrfOHtLS0JBpfCDzCj8gplp/IKZafyCmWn8gplp/IKZafyKnz5tLdBw7Y5x6FLsXc2dmZy+mcE+u0VwCYNm2amZeXl8dmoSWn0OnCIaHTajdu3BibffLJJ+bYJ554wsxDp80mEVp+DV26+4cffjDzvn3jq3f69GlzbK7wlZ/IKZafyCmWn8gplp/IKZafyCmWn8gplp/IqfNmnT8kdIpm6DiAJKd4WttUA8BVV11l5lu3bjVz67Td0GWgk556GnpeDh06lPXPHjhwoJmPHDnSzK21+tD24KG/D6HTkUPHPxRqLd/CV34ip1h+IqdYfiKnWH4ip1h+IqdYfiKnWH4ip9ys84ckvVSzxTp3GwA++OADM6+ttTc7Gjp0a
GxWVVVljk2yDg+E18OHDBkSm82dOzfRz87n+fyhvw+hLd337dtn5sWAr/xETrH8RE6x/EROsfxETrH8RE6x/EROsfxETgXX+UWkEsCrAMoBKIA6VV0kIsMBvAZgPIBdAOaoarL9ns9TofP5GxoazPzSSy/NOreOAcgkDwntdzBx4sTYbPfu3ebYUaNGmfn9999v5i+99JKZW0Ln+994441m/tVXX2X92IWSySv/aQC/VdXLAEwF8GsRuQzAIwA+VNVqAB9G3xNRLxEsv6q2qOr66OsjALYCqAAwG0B9dLd6AHfka5JElHvn9JlfRMYD+DmABgDlqnr2GlB70fWxgIh6iYyP7ReRMgBLATykqoe7H3etqioiPR4MLSILACxIOlEiyq2MXvlFpARdxf+Tqv4lurlVRMZG+VgAbT2NVdU6Va1VVfvsFCIqqGD5peslfjGArar6fLdoOYB50dfzACzL/fSIKF8kg0taTwPwGYDNAM6ufzyKrs/9/wvgnwDsRtdSn7lPdtxHg0w9/vjjsdnTTz+d5EfnVf/+/c181apVZl5ZWWnmw4cPj83Wrl1rjl28eLGZL1myxMxDp93Onz8/NnvooYfMsaFtrsePH2/mI0aMMPMkQsu3aW75rqr2/5RI8DO/qq4BEPfDrj+XSRFR8eARfkROsfxETrH8RE6x/EROsfxETrH8RE71qkt3F/NaviW0TXbo1Nbm5mYznzlzZmwWOh14zZo1Zh4SWue/9tprY7Pq6mpzrLXFNgCsXr066/GhU3ZDko4vBnzlJ3KK5SdyiuUncorlJ3KK5SdyiuUncorlJ3KqV63zJxFaj87nFt0hK1asMPMnn3zSzAcOHBibhdbKy8vtSy82NTWZecjkyZNjs/b2dnNs6Hz80HUS8qlfv35m3tHRUaCZZI+v/EROsfxETrH8RE6x/EROsfxETrH8RE6x/EROuVnnz2B/gkTjk/zsIUOGmHnovPXS0tLYrKqqyhw7a9YsM29sbDTz0POybt262Gzq1Knm2ND24Tt27DDzfFq0aJGZL1y4sEAzyR5f+YmcYvmJnGL5iZxi+YmcYvmJnGL5iZxi+YmcCq7zi0glgFcBlANQAHWqukhEngIwH8C+6K6Pqqp9YnoRS7KOHzpnvm9f+2m+5JJLzHzChAlmPn369NistbXVHFtbW2vmIaHnbf/+/bFZRUVFose+5ZZbzDyf19ZPuo6fzz0FMpXJQT6nAfxWVdeLyGAA60Tk/Sh7QVWfy9/0iChfguVX1RYALdHXR0RkK4Bk/2QTUerO6TO/iIwH8HMADdFNvxGRTSLyiohcGDNmgYg0ioh9nCgRFVTG5ReRMgBLATykqocBvASgCkANut4Z/K6ncapap6q1qprswyUR5VRG5ReREnQV/0+q+hcAUNVWVe1U1TMAfg/gyvxNk4hyLVh+6TolbTGArar6fLfbx3a72y8BbMn99IgoXzL5bf+/AvgVgM0isiG67VEA94hIDbqW/3YBSPUcxjQvzR1amhk+fLiZ33rrrWa+cuVKM58xY0ZsNmzYMHNs6PLXSZ/X0aNHx2YDBgwwx3700Udmft1115l5MSuGLb4z+W3/GgA9/Q3otWv6RMQj/IjcYvmJnGL5iZxi+YmcYvmJnGL5iZw6by7dneYW2yFlZWVm/sADD5j5gQMHzHzVqlWx2YYNG2IzIHwcQNJLns+fPz8227Rpkzm2urrazMeMGWPmZOMrP5FTLD+RUyw/kVMsP5FTLD+RUyw/kVMsP5FTUsj1cRHZB2B3t5tGAmgv2ATOTbHOrVjnBXBu2crl3Map6qhM7ljQ8v/kwUUai/XafsU6t2KdF8C5ZSutufFtP5FTLD+RU2mXvy7lx7cU69yKdV4A55atVOaW6md+IkpP2q/8RJSSVMovIjeJyFciskNEHkljDnFEZJeIbBaRDWlvMRZtg9YmIlu63TZcRN4Xke3Rnz1uk5bS3J4SkeboudsgIrNSmluliHwsIn8Vk
S9F5MHo9lSfO2NeqTxvBX/bLyJ9AGwD8AsA3wFYC+AeVf1rQScSQ0R2AahV1dTXhEVkOoCjAF5V1UnRbf8J4ICqPhP9w3mhqv5bkcztKQBH0965OdpQZmz3naUB3AHgPqT43BnzmoMUnrc0XvmvBLBDVb9W1ZMA/gxgdgrzKHqquhrAj6/kMRtAffR1Pbr+8hRczNyKgqq2qOr66OsjAM7uLJ3qc2fMKxVplL8CQFO3779DcW35rQBWicg6EVmQ9mR6UB5tmw4AewGUpzmZHgR3bi6kH+0sXTTPXTY7Xucaf+H3U9NU9V8A3Azg19Hb26KkXZ/Zimm5JqOdmwulh52l/y7N5y7bHa9zLY3yNwOo7Pb9z6LbioKqNkd/tgF4E8W3+3Dr2U1Soz/bUp7P3xXTzs097SyNInjuimnH6zTKvxZAtYhcLCKlAO4GsDyFefyEiAyKfhEDERkE4EYU3+7DywHMi76eB2BZinP5B8Wyc3PcztJI+bkruh2vVbXg/wGYha7f+O8E8O9pzCFmXpcA2Bj992XacwOwBF1vA0+h63cj9wMYAeBDANsBfABgeBHN7Y8ANgPYhK6ijU1pbtPQ9ZZ+E4AN0X+z0n7ujHml8rzxCD8ip/gLPyKnWH4ip1h+IqdYfiKnWH4ip1h+IqdYfiKnWH4ip/4faRL+eQGrt/EAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAEDtJREFUeJzt3W+MVFWax/HfQ0ujgvJn0RYBZZwQlahhVsQlmhWyOrpGIpqokGjQnSy8GMyS7AuJG7PGVTPRmVn31URUMoxxnVkDE8xEYZRsFHUFQWdBwAEWQWj+iYyBVrBp+tkXfZn0aN/nNl1/bjXn+0k6VNVTp+qkml/fe+vcc4+5uwCkZ0DZHQBQDsIPJIrwA4ki/ECiCD+QKMIPJIrwA4ki/ECiCD+QqDPq+WZmxumEQI25u/XmeRVt+c3sFjP7o5ltM7MFlbwWgPqyvp7bb2ZNkrZIuknSbkkfSJrl7puCNmz5gRqrx5Z/sqRt7r7d3dsl/VrS7RW8HoA6qiT8oyXt6nZ/d/bYXzCzOWa21szWVvBeAKqs5l/4uftCSQsldvuBRlLJlr9V0thu98dkjwHoByoJ/weSxpvZ98ysWdJMSa9Wp1sAaq3Pu/3u3mFm8yStkNQkaZG7b6xaz9AvmMVfLHOlqMbV56G+Pr0Zx/ynHcLfeOpykg+A/ovwA4ki/ECiCD+QKMIPJIrwA4mq63x+4FScc845Yb2trS2sM8wYY8sPJIrwA4ki/ECiCD+QKMIPJIrwA4liVh9qqrm5ObfW3t4eth0wIN42Ra8tSUOHDs2t7d+/P2zbnzGrD0CI8AOJIvxAogg/kCjCDySK8AOJIvxAohjnR7/V1NTU53rROQb9GeP8AEKEH0gU4QcSRfiBRBF+IFGEH0gU4QcSVdE4v5ntkHRE0glJHe4+qeD5jPMDNdbbcf5qXLd/mrsfrMLrAKgjdvuBRFUafpf0ezNbZ2ZzqtEhAPVR6W7/9e7eambnS3rDzD5x97e7PyH7o8AfBqDBVG1ij5k9KqnN3X8aPIcv/IAaq/nEHjMbbGbnnLwt6YeSPu7r6wGor0p2+1sk/dbMTr7Of7r78qr0CkDNMZ8fKEG0JkFnZ2dFr818fgAhwg8kivADiSL8QKIIP5Aowg8kqhqz+gCcokqH86qBLT+QKMIPJIrwA4ki/ECiCD+QKMIPJIrwA4linB+haOppb4wZMya39vnnn4dtjx49WtF7I8aWH0gU4QcSRfiBRBF+IFGEH0gU4QcSRfiBRDHOfxrI1k7oUdGl2SdOnBjWhwwZEtZvu+22sD537tzc2kMPPRS2Xbp0aVg/eLB2i0M3NTWF9enTp4f16667LqwPHTo0tzZv3rywbXt7e1jvLbb8QKIIP5Aowg8kivADiSL8QKIIP5Aowg8kqnCc38wWSbpN0gF3vyJ7bISk30gaJ2mHpLvd/U+16yYi0Vj+jTfeGLZ94403wvqRI0fC+qBBg8J6c3Nzbu3QoUNh2yVLloT1HTt2hPVXXnklt7Zt27aw7XPPPRfWi65FsHHjxrD+5ptv5tYGDx4ctq3nOP8vJd3yrccWSFrp7uMlrczuA+hHCsPv7m9L+vaf6NslLc5uL5Y0o8r9AlBjfT3mb3H3vdntfZJaqtQfAHVS8bn97u5mlnvQaWZzJM2p9H0AVFdft/z7zWyUJGX/Hsh7orsvdPdJ7j6pj+8FoAb6Gv5XJc3Obs+WtKw63QFQL4XhN7OXJf2PpEvNbLeZ/UjSTyTdZGZbJd2Y3QfQjxQe87v7rJzS31W5L6etaL69VDznPhorL2p/3333hW03b94c1seNGxfW33vvvbA+bNiwsB5Zvnx5WF+wIB5hn
jBhQm5t4MCBYdtdu3aF9dGjR4f1xx57LKwfP348t3bs2LGwbbVwhh+QKMIPJIrwA4ki/ECiCD+QKMIPJMqKhpmq+mbBacD9WdFQ3tlnnx3Wi5aivuiii8J6dBnpJ598Mmy7aNGisF40dTWamipJ06ZNy60dPnw4bNva2hrWr7zyyrC+b9++3Nr9998ftn3xxRfDetEw5J49e8J69HspuqR5Z2dnbs3d5e7xf8gMW34gUYQfSBThBxJF+IFEEX4gUYQfSBThBxLFEt1VUDTOf+edd4b1nTt3hvWxY8eG9Rkz8q+f+vXXX4dti6auFo21T548Oazfe++9ubW1a9eGbS+77LKwHo3jS9KaNWtya5999lnYtuiy4VOmTAnrN9xwQ1h/5plncmsjR44M20ZLk584cSJs2x1bfiBRhB9IFOEHEkX4gUQRfiBRhB9IFOEHEsV8/kzRnPto2eRrr702bPvEE0+E9VGjRoX1559/PqzPnDkzt1Y0Z/6TTz4J61988UVYf+edd8L6+PHjc2vz588P25555plh/csvvwzrUd9fe+21sO2kSfECU0XndqxYsSKsR+covPvuu2HbaGnyjo4O5vMDiBF+IFGEH0gU4QcSRfiBRBF+IFGEH0hU4Ti/mS2SdJukA+5+RfbYo5L+UdLJi7o/7O7xwKkqH+ePxlYHDIj/jj344INhfdasvJXIu7z00ku5tWjOuiQNGjQorBfNLT/jjPiyC5s2bcqtXXjhhWHbojnzReP827dvD+tTp07NrR05ciRse9VVV1X03iNGjMitFY3TF11rIDrvQypeury9vT23VrRWwrPPPhu+bmdnZ9XG+X8p6ZYeHv93d5+Y/RQGH0BjKQy/u78t6VAd+gKgjio55p9nZuvNbJGZDa9ajwDURV/D/wtJ35c0UdJeST/Le6KZzTGztWYWH0QBqKs+hd/d97v7CXfvlPScpNyrOLr7Qnef5O7xTAkAddWn8JtZ92lod0j6uDrdAVAvhZfuNrOXJU2VNNLMdkv6V0lTzWyiJJe0Q9LcGvYRQA0Uht/dexoAf6Evb2Zm4Zj11VdfHbbfvXt3bm3lypVh26Jr40+YMCGsR+PlH330Udh2+vTpYf38888P6xs3bgzr0Xz+YcOGhW3POuussL5s2bKwXnSeyFdffZVbKxorj+atS9K5554b1tetW5dbK7oGQ9Hn9tZbb4X16DoGkrR48eLcWtE1GKJr85/K9Tk4ww9IFOEHEkX4gUQRfiBRhB9IFOEHElXXJbqbmpo0dOjQ3PrTTz8dtr/mmmtya998803Y9p577gnrLS0tYf3999/PrV1++eVh26Jlso8dOxbWP/3007AeLQFeNCQVDZ9K0uuvvx7Wp02bFta3bduWW9u6dWvYtuhzGzhwYFi/+eabc2t79uwJ265atSqsF/X94osvDuvRJdOPHz8etm1ubs6tdXZ2hm27Y8sPJIrwA4ki/ECiCD+QKMIPJIrwA4ki/ECiGmqJ7qLppZdeemlu7amnngrbDh8eX2Zww4YNYf2SSy7JrRVNLV2+fHlYP++888L6I488EtbvuOOO3FqlS2yPHTs2rBctk33w4MHc2tGjR8O2U6ZMCetFv7No6mvRNOu2trawHp2vInWd0xJZvXp1bu3xxx8P20aXim9ra1NHRwdLdAPIR/iBRBF+IFGEH0gU4QcSRfiBRBF+IFENNc5fS0VLeBfNDb/gggtya3fddVfYds2aNWH90KF4HdRdu3aF9ehy6EXXKRg5cmRYX79+fVgvGs/u6OjIrRVdx6Dod1b03tHlt6Plu3uj6DoIDzzwQFiPLjVfdB2DLVu2hHV3Z5wfQD7CDySK8AOJIvxAogg/kCjCDySK8AOJKhznN7Oxkn4lqUWSS1ro7v9hZiMk/UbSOEk7JN3t7n8qeK2GHecvut65Wa+GTnsUjcP3RjQvXYr7VvT7LaoXfW5FfUP9VXOcv0PSP7v7BEl/I+nHZjZB0gJJK919vKSV2
X0A/URh+N19r7t/mN0+ImmzpNGSbpe0OHvaYkkzatVJANV3Ssf8ZjZO0g8krZbU4u57s9I+dR0WAOgnen0wamZDJC2RNN/dD3c/znR3zzueN7M5kuZU2lEA1dWrLb+ZDVRX8F9y96XZw/vNbFRWHyXpQE9t3X2hu09y90nV6DCA6igMv3Vt4l+QtNndf96t9Kqk2dnt2ZKWVb97AGqlN0N910taJWmDpJPjYQ+r67j/vyRdJGmnuob6wrmpZQ71Aano7VBfMvP5gVQwnx9AiPADiSL8QKIIP5Aowg8kivADiapsrikaQiXTjWs91FvJdGPUFlt+IFGEH0gU4QcSRfiBRBF+IFGEH0gU4QcSxTj/aaCRx8sbuW+pY8sPJIrwA4ki/ECiCD+QKMIPJIrwA4ki/ECiGOc/DTBnHn3Blh9IFOEHEkX4gUQRfiBRhB9IFOEHEkX4gUQVht/MxprZf5vZJjPbaGb/lD3+qJm1mtkfsp9ba99d9MTMcn+APFZ0EoiZjZI0yt0/NLNzJK2TNEPS3ZLa3P2nvX4zM844qYEBA/L/hnd2dtaxJ2gE7t6rv/qFZ/i5+15Je7PbR8xss6TRlXUPQNlO6ZjfzMZJ+oGk1dlD88xsvZktMrPhOW3mmNlaM1tbUU8BVFXhbv+fn2g2RNJbkp5w96Vm1iLpoCSX9G/qOjT4h4LXYLe/BtjtR3e93e3vVfjNbKCk30la4e4/76E+TtLv3P2Kgtch/DVA+NFdb8Pfm2/7TdILkjZ3D372ReBJd0j6+FQ7CaA8vfm2/3pJqyRtkHRyM/KwpFmSJqprt3+HpLnZl4PRa7HlP82MHh1/99va2lqnnuCkan7b/46knl7stVPtFIDGwRl+QKIIP5Aowg8kivADiSL8QKIIP5CoXp/eW5U3Y5wfqLmqneEH4PRE+IFEEX4gUYQfSBThBxJF+IFEEX4gUfVeovugpJ3d7o/MHmtEjdq3Ru2XRN/6qpp9u7i3T6zrST7feXOzte4+qbQOBBq1b43aL4m+9VVZfWO3H0gU4QcSVXb4F5b8/pFG7Vuj9kuib31VSt9KPeYHUJ6yt/wASlJK+M3sFjP7o5ltM7MFZfQhj5ntMLMN2crDpS4xli2DdsDMPu722Agze8PMtmb/9rhMWkl9a4iVm4OVpUv97Bptxeu67/abWZOkLZJukrRb0geSZrn7prp2JIeZ7ZA0yd1LHxM2s7+V1CbpVydXQzKzpyQdcvefZH84h7v7Qw3St0d1iis316hveStL368SP7tqrnhdDWVs+SdL2ubu2929XdKvJd1eQj8anru/LenQtx6+XdLi7PZidf3nqbucvjUEd9/r7h9mt49IOrmydKmfXdCvUpQR/tGSdnW7v1uNteS3S/q9ma0zszlld6YHLd1WRtonqaXMzvSgcOXmevrWytIN89n1ZcXrauMLv++63t3/WtLfS/pxtnvbkLzrmK2Rhmt+Ien76lrGba+kn5XZmWxl6SWS5rv74e61Mj+7HvpVyudWRvhbJY3tdn9M9lhDcPfW7N8Dkn6rrsOURrL/5CKp2b8HSu7Pn7n7fnc/4e6dkp5TiZ9dtrL0EkkvufvS7OHSP7ue+lXW51ZG+D+QNN7MvmdmzZJmSnq1hH58h5kNzr6IkZkNlvRDNd7qw69Kmp3dni1pWYl9+QuNsnJz3srSKvmza7gVr9297j+SblXXN/7/J+lfyuhDTr8ukfS/2c/Gsvsm6WV17QYeV9d3Iz+S9FeSVkraKulNSSMaqG8vqms15/XqCtqokvp2vbp26ddL+kP2c2vZn13Qr1I+N87wAxLFF35Aogg/kCjCDySK8AOJIvxAogg/kCjCDySK8AOJ+n9jc42c3nkq+AAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAE2tJREFUeJzt3WtslWW2B/D/slBA5FIuLRUakWsFgvSkkmMkRuOZCTWTwPhBBpMJkxiZD2NyJtF4jOeDfjTmoDHGjOkcyYAZmTnJYLyEzFHxSnIACypyGUGQW6Gt3MvFQuk6H/oyqdp3rbqvb13/X0Jo95+n++mGxd7d632eR1QVRBTPdeWeABGVB4ufKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwqKxU8U1JBS3pmI8HLCIhCR1Kyqqsoce/Hixbzue+jQoTnnp06dyuu+qX+qmv4Poo+8il9EFgN4HkAFgP9W1afz+XqUm+HDh6dmTU1N5tht27aZeUVFhZlXV1eb+aRJk1KzdevWmWOpuHJ+2S8iFQBeBNAEYA6A5SIyp1ATI6Liyudn/oUAvlLVA6p6GcBfACwpzLSIqNjyKf7JAI70+fxoctt3iMhKEWkRkZY87ouICqzob/ipajOAZoBv+BFlST7P/K0A6vp8PiW5jYgGgXyK/xMAM0XkZhGpBPArAG8UZlpEVGw5v+xX1W4ReRjA/6K31bdaVXcVbGaBXHed/X+w126zevkzZswwxz777LNm7rXyNm/ebOabNm1KzdavX2+OvXLlipl7u1BxlypbXj/zq+oGABsKNBciKiFe3ksUFIufKCgWP1FQLH6ioFj8REGx+ImCKul6/sHM68VbhgyxH+aampq88m+//TY127Nnjzl269atZu6t19+wwe70tre3p2YNDQ3m2Lq6OjN/8803zfzy5cupGa8R4DM/UVgsfqKgWPxEQbH4iYJi8RMFxeInCkpK2dIYzDv5WDvkzp071xx79epVM7dadQDwzTffmLnVCvS25r7lllvM3PvePvjgAzO3vrdz586ZY8+ePWvmXhuysrIyp3kBwOnTp808y63AgW7dzWd+oqBY/ERBsfiJgmLxEwXF4icKisVPFBSLnygo9vkT3vbY9fX1qdn48ePNsYcPHzbzrq4uM/e+/pkzZ1Iza1kr4F+D0NPTY+bW9Q8AMGLEiNTM67V79+3927X+TqdMmWKO3b59u5l3d3ebeTmxz09EJhY/UVAsfqKgWPxEQbH4iYJi8RMFxeInCiqvrbtF5CCATgBXAXSramMhJlUMInbr0+v7zpo1KzXz1pWfOnXKzL11621tbWZu9bu9Y66979tb1+7lVj/cm9vChQvNfP/+/WZufX1vK/ZRo0aZufd9DwaF2Lf/blU9UYCvQ0QlxJf9REHlW/wK4G0R2SYiKwsxISIqjXxf9i9S1VYRqQbwjoj8Q1U/6vsHkv8U+B8DUcbk9cyvqq3J7x0AXgPwg3doVLVZVRuz/GYgUUQ5F7+IjBSRUdc+BvBzADsLNTEiKq58XvbXAHgtaaENAfCqqv69ILMioqLLufhV9QCAWws4l6Ly1n57a8vHjBmTmnl713u8+54wYYKZW+v5vX0KvDXzly5dMvPq6moztx537/qIlpYWM585c6aZX7hwITVrbW01x+azhwKQ7X39r2GrjygoFj9RUCx+oqBY/ERBsfiJgmLxEwVViFV9PwneFtbWMdm33mp3PA8ePGjm7e3tZt7Z2WnmVlvJaxPefvvtZr5+/Xozt47BBuw2pjfWa1MeO3bMzEePHp3z1/a25h4MrTwPn/mJgmLxEwXF4icKisVPFBSLnygoFj9RUCx+oqDY5
0+MHDnSzA8dOpSaWcd3D+Rre0tbvW3H58yZk5p9+eWX5ljvKGqvH25d/wAANTU1qdn58+fNsWPHjjXzG264wcytr28t0Qb8pcrWvwdgcFwHwGd+oqBY/ERBsfiJgmLxEwXF4icKisVPFBSLnygo9vkTdXV1Zm71s7u6usyxXp/eWncO+NcJdHR05Py1vX6214vP53s7ccI+3NnLr7/+ejOfPXt2arZv3z5z7MmTJ8083/0AsoDP/ERBsfiJgmLxEwXF4icKisVPFBSLnygoFj9RUG6fX0RWA/gFgA5VnZfcNg7AXwFMBXAQwP2qerp408yf14/2jsm2evmPPfaYOba5udnMvX62d6bAzTffnJp56869fnU+a+YBoK2tLTW78cYbzbHHjx83c+/48GHDhqVmEydONMceOXIk568N/HT6/H8CsPh7tz0OYKOqzgSwMfmciAYRt/hV9SMAp7538xIAa5KP1wBYWuB5EVGR5fozf42qXntN1gYgfa8mIsqkvK/tV1UVkdQNy0RkJYCV+d4PERVWrs/87SJSCwDJ76krS1S1WVUbVbUxx/sioiLItfjfALAi+XgFgNcLMx0iKhW3+EVkHYD/AzBbRI6KyIMAngbwMxHZB+Dfks+JaBBxf+ZX1eUp0T0FnktReXvj796928yHDx+emm3evNkc6/WEvWsMJk+ebOY9PT2pmbee3+vTjxo1ysy96yesaxSuXLlijp07d66Znzt3zsyrqqpSs4aGBnPsqlWrzNz7OxsMeIUfUVAsfqKgWPxEQbH4iYJi8RMFxeInCirM1t3estj58+eb+enT6SuWveOevS2mz5w5Y+Zeu238+PGpmbdc2Gv1eVuae0t+a2trUzPveG9vObJn0aJFqdmpU99fq/ZdXmvYa99evHjRzLOAz/xEQbH4iYJi8RMFxeInCorFTxQUi58oKBY/UVBh+vzXXWf/P9fe3m7m06ZNS828nq53DYHXU/aWj1pLV72tuW+77TYzP3z4sJl3dnaa+ZAh6f/EvO2zrW2/Af8aA2sZ9ttvv22OvXz5spl7y5EHAz7zEwXF4icKisVPFBSLnygoFj9RUCx+oqBY/ERBhenzW/1mAHjggQfM3Nom2ltv761b99aGe2vqrfufMGGCOXbkyJFm7h2jvWXLFjO3vvf6+npzrDc3b58E6/qIcePGmWO9x9zbH8LbLyAL+MxPFBSLnygoFj9RUCx+oqBY/ERBsfiJgmLxEwXl9vlFZDWAXwDoUNV5yW1PAXgIwLUm7hOquqFYkywEa203AHz++edmfuTIkdTM23d/8eLFZu71+U+ePGnm1tpzb6zXr/bWtU+ZMsXMrTX51dXV5lhv7t71Ey0tLamZdZ4AAOzatcvMvWsMfip9/j8B6O9f73OquiD5lenCJ6IfcotfVT8CkP3/xojoR8nnZ/6HRWSHiKwWkfR9pIgok3It/j8AmA5gAYDjAFal/UERWSkiLSKS/gMYEZVcTsWvqu2qelVVewD8EcBC4882q2qjqjbmOkkiKrycil9E+r5V+ksAOwszHSIqlYG0+tYBuAvABBE5CuBJAHeJyAIACuAggN8WcY5EVARu8avq8n5ufrkIcymq7u5uM+/p6TFzVU3NJk2aZI5dvXq1mXv9bG9ux44dy3ns3r17zXz06NFm7p2HYOXWHgmAf42BdyaB1Wv31vM3NTWZ+dq1a818MOAVfkRBsfiJgmLxEwXF4icKisVPFBSLnyioMFt333TTTWZ+/vx5M7faTvv37zfHnjhxwswrKytzvm/Abnl5rThvSe/p06fN3FuWay3p9bZT91p53tHl+di9e3fRvnZW8JmfKCgWP1FQLH6ioFj8REGx+ImCYvETBcXiJwoqTJ/f67WPGTPGzK3ttadPn26O7ezsNPOjR4+a+bx588y8vb09Nbtw4YI51ruGwDui29ui2vr6+/btM8eOHTvWzL2tu61rHLx5t7a2mrl39Pnhw4fN3FoiXip85icKi
sVPFBSLnygoFj9RUCx+oqBY/ERBsfiJggrT5/e2154/f76Zv//++6mZd1zzoUOHzHz8+PFm7q2pr6mpSc2+/vprc6y3z8GOHTvMfNasWWZuHeHt7TXgbe3t7UXw8ccfp2bLli0zx3Z1dZn51q1bzVxEzJx9fiIqGxY/UVAsfqKgWPxEQbH4iYJi8RMFxeInCsrt84tIHYC1AGoAKIBmVX1eRMYB+CuAqQAOArhfVe2GdBlZx1gD/pr6+vr61GzGjBnmWKvXDQAHDhww88bGRjPfvn17atbQ0GCO9fYx8NbUe/3qs2fPpmZVVVXmWG9ffq/Pb+1z4F17Yc0b8K+9yEIf3zOQZ/5uAI+o6hwA/wrgdyIyB8DjADaq6kwAG5PPiWiQcItfVY+r6vbk404AewBMBrAEwJrkj60BsLRYkySiwvtRP/OLyFQADQC2AKhR1eNJ1IbeHwuIaJAY8LX9InIDgL8B+L2qnut77bKqqoj0+0OOiKwEsDLfiRJRYQ3omV9EhqK38P+squuTm9tFpDbJawF09DdWVZtVtVFV7XetiKik3OKX3qf4lwHsUdVn+0RvAFiRfLwCwOuFnx4RFctAXvbfAeDXAL4Qkc+S254A8DSA/xGRBwEcAnB/caZYGN4STW8rZ6vVt2fPHnPsp59+auZeO81bPmrN/eLFi+bYfLfu7unpyXm897U7Ovp9MflPU6dONXPre7vzzjvNsS0tLWbutfq2bNli5lngFr+qbgKQtjj5nsJOh4hKhVf4EQXF4icKisVPFBSLnygoFj9RUCx+oqDCbN3d3d1t5idPnjRz6zhob3mot7xz4sSJZu4tux09enRqdubMGXNsRUWFmXtHTQ8fPtzMhw4dmpp5j7n3d+Z9b5WVlanZiBEjzLHetRV79+41c27dTUSZxeInCorFTxQUi58oKBY/UVAsfqKgWPxEQYXp83t93dmzZ5v5k08+mZqtXbvWHOttzW1dQwD4R1lb25Jb1wAAfp/eG+8dT27Nzbt+obq62sy97datPv+FCxfMsd61F97+D971EVnAZ36ioFj8REGx+ImCYvETBcXiJwqKxU8UFIufKKgwfX5vbbjXM37uuedSs7vvvtsc+8orr5j59OnTzXzy5Mlmbq1rr62tNcd6x4d7j5t3DcOcOXNSM2/Nu7fev6bGPh7SOmbbOxPAu2/v+HDvupJLly6ZeSnwmZ8oKBY/UVAsfqKgWPxEQbH4iYJi8RMFxeInCsrt84tIHYC1AGoAKIBmVX1eRJ4C8BCAa4vRn1DVDcWaaL6s/eMBYNiwYWY+duzY1OzFF180x3pn2FvrzgG/lz5p0qTUzOtX33HHHWb+1ltvmXlVVZWZW9dPeL3wq1evmrm3pt66/sHbK2Dp0qVmvmbNGjP39mjIgoFc5NMN4BFV3S4iowBsE5F3kuw5Vf2v4k2PiIrFLX5VPQ7gePJxp4jsAWBfckZEmfejfuYXkakAGgBsSW56WER2iMhqEen39Z+IrBSRFhFpyWumRFRQAy5+EbkBwN8A/F5VzwH4A4DpABag95XBqv7GqWqzqjaqamMB5ktEBTKg4heRoegt/D+r6noAUNV2Vb2qqj0A/ghgYfGmSUSF5ha/9C69ehnAHlV9ts/tfZeL/RLAzsJPj4iKZSDv9t8B4NcAvhCRz5LbngCwXEQWoLf9dxDAb4sywwLxtqh+9NFHzfzVV19Nzbztrb2lp01NTWbubQNtLU/1luTu37/fzL0trKdNm2bm1uNeX19vjvXm5hk3blxq9uGHH5pjve/La0Nm4Qhuz0De7d8EoL+F15nt6RORj1f4EQXF4icKisVPFBSLnygoFj9RUCx+oqCklP1IESlb87OiosLMGxoazPyhhx5KzR555BFz7KJFi8x89+7dZn7XXXeZubV89L777jPHtrW1mfl7771n5suWLTNza25dXV3m2HfffdfM77nnHjN/4YUXUrNnnnnGHPvSSy+Z+c6d9jVt3vUVxaSq9
p7oCT7zEwXF4icKisVPFBSLnygoFj9RUCx+oqBY/ERBlbrP/w2AQ31umgDgRMkm8ONkdW5ZnRfAueWqkHO7SVXtTRgSJS3+H9y5SEtW9/bL6tyyOi+Ac8tVuebGl/1EQbH4iYIqd/E3l/n+LVmdW1bnBXBuuSrL3Mr6Mz8RlU+5n/mJqEzKUvwislhEvhSRr0Tk8XLMIY2IHBSRL0Tks3IfMZYcg9YhIjv73DZORN4RkX3J7/YxuaWd21Mi0po8dp+JyL1lmludiLwvIrtFZJeI/Htye1kfO2NeZXncSv6yX0QqAOwF8DMARwF8AmC5qtqL2ktERA4CaFTVsveEReROAOcBrFXVecltzwA4papPJ/9xVqnqf2Rkbk8BOF/uk5uTA2Vq+54sDWApgN+gjI+dMa/7UYbHrRzP/AsBfKWqB1T1MoC/AFhShnlknqp+BOD7h9AvAXDtcPg16P3HU3Ipc8sEVT2uqtuTjzsBXDtZuqyPnTGvsihH8U8GcKTP50eRrSO/FcDbIrJNRFaWezL9qEmOTQeANgD2cUCl557cXErfO1k6M49dLideFxrf8PuhRar6LwCaAPwueXmbSdr7M1uW2jUDOrm5VPo5WfqfyvnY5XridaGVo/hbAdT1+XxKclsmqGpr8nsHgNeQvdOH268dkpr8nn5QX4ll6eTm/k6WRgYeuyydeF2O4v8EwEwRuVlEKgH8CsAbZZjHD4jIyOSNGIjISAA/R/ZOH34DwIrk4xUAXi/jXL4jKyc3p50sjTI/dpk78VpVS/4LwL3ofcd/P4D/LMccUuY1DcDnya9d5Z4bgHXofRl4Bb3vjTwIYDyAjQD2AXgXwLgMze0VAF8A2IHeQqst09wWofcl/Q4AnyW/7i33Y2fMqyyPG6/wIwqKb/gRBcXiJwqKxU8UFIufKCgWP1FQLH6ioFj8REGx+ImC+n9rXNNuo9BFwgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAErpJREFUeJzt3Xts1GW6B/DvI7TcsdwsKBU4iJdFTCEVL+DlqBDAVSQmCsYTTiTLRvGyySZiMOZIzEmUHBfXSzAlIBdXdk28ALpw4KCBQ4KGghxQsQsiuIVSQBYotAiU5/zRH+dU7e95h5np/KY8309COp3vvDNvhz79zcz7e99XVBVE5M9FSXeAiJLB4idyisVP5BSLn8gpFj+RUyx+IqdY/EROsfiJnGLxEznVNpcPJiI8nZCohamqpHK7jI78IjJGRCpFZKeIPJPJfRFRbkm65/aLSBsAfwMwCkAVgI0AJqnq10YbHvmJWlgujvzDAexU1V2qegrAnwGMz+D+iCiHMin+ywD8vcn3VdF1PyEiU0WkQkQqMngsIsqyFv/AT1XLAZQDfNlPlE8yOfLvBVDS5Pu+0XVE1ApkUvwbAQwSkQEiUghgIoBl2ekWEbW0tF/2q+oZEXkcwH8CaANgvqp+lbWeEbUgEfsDcQ8rXKU91JfWg/E9P+WJC7n4c3KSDxG1Xix+IqdY/EROsfiJnGLxEznF4idyKqfz+an1uegi+/jQtq39K3T11VfHZk899ZTZduXKlWbes2dPM58zZ05s1rFjR7PtvHnzzLympsbMp0+fbuYnT54081zgkZ/IKRY/kVMsfiKnWPxETrH4iZxi8RM5xVl9kc2bN5v5sGHDctST7Jo0aZKZjx9vL7v44IMPZrM7PxH63Tt69KiZFxUVZbM7OXXmzJnYrKCgIKP75qw+IjKx+ImcYvETOcXiJ3KKxU/kFIufyCkWP5FTHOe/wJ0+fdrM27RpY+aZrnJ79uzZ2Ky+vt5s++mnn5r5rFmzzHzt2rWx2YkTJ8y2nTp1MvPQVOcQa5y/V69eZtsjR46YOcf5icjE4idyisVP5BSLn8gpFj+RUyx+IqdY/EROZbR0t4jsBlALoAHAGVUty0anKHtC4/iZCp0HYNm1a5eZb9q0ycyrqqrM3DrHwTr/AADq6urMvHPnzmYesn///tgstKT5zJkzM3rsc7Kxbv8/q+qhLNwPEeUQX/YTOZVp8SuAVSKySUSmZqNDRJQbmb7sH6mqe0XkEgCrReQbVV3X9AbRHwX+YSDKMxkd+VV1b/T1AIAPAAxv5jblqlrGDwOJ8kvaxS8inUSky7nLAEYD+DJbHSOilpXJy/5iAB9EQz1tAbyjqva2qkSUNzifPwesbaoB4Jtvvsno/svK4t9RLVu2zGzbu3dvM89kHB+w5/tbc9pTkcn69g0NDWYe+rkznc//3XffxWajR4822+7cudPMOZ+fiEwsfiKnWPxETrH4iZxi8RM5xeIncopDfXmgsLDQzE+dOpX2fZ88edLM27Vrl/Z9A+HhOmub7a5du5pta2trzbx79+5mbglt/33s2DEzLykpMfPQlOHLL788Ntu3b5/ZNlSzHOojIhOLn8gpFj+RUyx+IqdY/EROsfiJnGLxEzmVjdV7KUOZjOMDwF133ZWlnvzS8ePHzbxjx45mbk27DS0rHhrHD413r1+/PjZbsmSJ2fbmm28283vvvdfMu3TpYubW/3muzr3hkZ/IKRY/kVMsfiKnWPxETrH4iZxi8RM5xeIncorz+S8A1jLTofn2oSWqQ9tkDxkyxMzbto0/laS+vt5sG1pr4KGHHjLz2bNnp33foXMMPvzwQzOfMGGCmd9ww
w2xWUVFhdk2hPP5icjE4idyisVP5BSLn8gpFj+RUyx+IqdY/EROBefzi8h8AL8GcEBVr42u6w7gLwD6A9gN4AFV/UfLdZMsQ4cOjc1Onz5ttg1tc21t/50Ka/370DbXe/fuNfPFixebufWzh37u0FoD1jg9ED5/oq6uzsxzIZUj/wIAY3523TMA1qjqIABrou+JqBUJFr+qrgNw+GdXjwewMLq8EMB9We4XEbWwdN/zF6tqdXR5P4DiLPWHiHIk4zX8VFWtc/ZFZCqAqZk+DhFlV7pH/hoR6QMA0dcDcTdU1XJVLVPVzD45IqKsSrf4lwGYHF2eDGBpdrpDRLkSLH4RWQJgA4CrRKRKRKYAeBHAKBHZAeCu6HsiakU4n/8CYI0ph/aZD63L37t377T6dM7GjRtjs9LSUrNtQ0ODmR84EPtuE4A9Z79Xr15m29A5CKHnLeTJJ5+Mzd56662M7pvz+YnIxOIncorFT+QUi5/IKRY/kVMsfiKnuEX3BcAarj148KDZ9sSJE2YeGuoLDRVbw22VlZVm28GDB5t5165dzdwaCgwN1Q0cONDMQ0uiFxUVmbm1pHmu8MhP5BSLn8gpFj+RUyx+IqdY/EROsfiJnGLxEzmV/GAjBYWWgbbG4kNLd4fGozds2GDms2bNMvNXXnklNlu+fLnZtn///mYe6nuHDh1is4cffths+84775h5aOvyW2+91cz37dtn5rnAIz+RUyx+IqdY/EROsfiJnGLxEznF4idyisVP5BSX7s4DoWWir7jiCjN/4oknYrPQNtePPPKImYe2wd6zZ4+ZX3fddbHZpZdearZt3769md9zzz1mfurUqdhs+/btZtt+/fqZ+YIFC8x88uTJZm4pLra3vgzVLJfuJiITi5/IKRY/kVMsfiKnWPxETrH4iZxi8RM5FRznF5H5AH4N4ICqXhtd9zyA3wA4tyj8DFX9a/DBOM7frG7dupl56DyAzz//PDYLrW2/a9cuM//oo4/MfPr06WbeuXPn2Cy09v3hw4fNvEuXLmZuPW+hNRLatGlj5imMtZu5tW7/3XffbbZdsWJF6LGzNs6/AMCYZq6fraql0b9g4RNRfgkWv6quA2D/CSaiVieT9/yPi8hWEZkvIvbrViLKO+kW/xwAAwGUAqgG8HLcDUVkqohUiEhFmo9FRC0greJX1RpVbVDVswDmAhhu3LZcVctUtSzdThJR9qVV/CLSp8m3EwB8mZ3uEFGuBJfuFpElAG4H0FNEqgD8G4DbRaQUgALYDeC3LdhHImoBnM+fImtcuKCgwGw7d+5cM3/sscfMvLy83MwHDRoUmw0ZMsRsu3r1ajO/7bbbzPzo0aNmbs3ZD+0pEBprDz3vO3fujM1Onjxptu3evbuZ9+jRw8xDfbN89tlnZj5ixAgz53x+IjKx+ImcYvETOcXiJ3KKxU/kFIufyKlWtUW3NT30+PHjZtvQFM6hQ4ea+RdffBGbhaaWvvbaa2be0NBg5gcPHjTzs2fPxmYDBw40286YMcPMp02bZuYrV640c2uL7zfffNNsO2ZMc5NJ/19oKNBa8jw01BcaqrOWBQfCw5gff/xxbLZ06VKzbbbwyE/kFIufyCkWP5FTLH4ip1j8RE6x+ImcYvETOdWqpvRaSz1ffPHFZtvQEta1tbVmPnbs2NgstIX2kSNHzDw0zv/yy7GrpAEA6uvrY7MdO3aYbYuKisz8hx9+MPNrrrnGzK1zII4dO2a2DS1Zbp33Adhj+aHfhwEDBpi5tVw6AFxyySVmbgltex7qG6f0EpGJxU/kFIufyCkWP5FTLH4ip1j8RE6x+Imcyqv5/KFx3VmzZsVm999/v9m2tLTUzEPnO8ybNy82e/vtt822obHy0Hz/W265xcwXL14cm1lz/YHwfP/QOQyhee11dXWxWWhOvbXsNxA+f2Lt2rWx2cyZM822U6ZMMfPQeQLbt283c+schUzOETgfPPITOcXiJ3KKxU/kFIufyCkWP5FTL
H4ip1j8RE4Fx/lFpATAIgDFABRAuar+UUS6A/gLgP4AdgN4QFX/Yd1XSUkJnn766dg8tFX1jz/+GJsVFhaabb///nsz79evn5kfOnQoNqupqTHb9uzZ08yXL19u5p988omZW2POof0KQs9byP79+828b9++sVmob9b6DQDQsWNHM7f2WlizZo3Ztn379mYeOncj9H/erl272KxDhw5mW+vcjKqqKrNtU6kc+c8A+L2q/grAjQCmicivADwDYI2qDgKwJvqeiFqJYPGrarWqbo4u1wLYDuAyAOMBLIxuthDAfS3VSSLKvvN6zy8i/QEMBfA5gGJVrY6i/Wh8W0BErUTKxS8inQG8B+B3qvqTxde08cT4Zk+OF5GpIlIhIhWh/fSIKHdSKn4RKUBj4f9JVd+Prq4RkT5R3gfAgebaqmq5qpapallowUUiyp1g8UvjR7LzAGxX1T80iZYBmBxdngwgN1uLElFWpDKldwSAfwGwTUS2RNfNAPAigHdFZAqAPQAeCN1RfX09tm7dGpuHpvRawy+VlZVm21dffdXMremfgL189qhRo8y2GzduNPPQlN2bbrrJzK2tqkNDWlu2bDHzIUOGmPkdd9xh5u+++25sduWVV5ptX3/9dTPftm2bmVt9f+GFF8y2oWHn/v37m/m3335r5ldddVVsFppebg2Bns9S/MHiV9X1AOIGZO9M+ZGIKK/wDD8ip1j8RE6x+ImcYvETOcXiJ3KKxU/kVE6X7j5x4oQ55r1+/XqzvTUuHFpKecOGDWYeOvvQWho8NOY7cuRIM7/xxhvNfOXKlWa+aNGi2KxHjx5m29DW5qGtqENLVFtCW5Nff/31Zr5u3Tozt6Ybh6ZJh5YsDy3XPnjwYDN/4403YrPQkuahLbxTxSM/kVMsfiKnWPxETrH4iZxi8RM5xeIncorFT+SUnM/834wfTMR8sNB8fmt+9qpVq8y2oW2PQ1tZW30LLU9WUFBg5qE597fffruZW/P5Q1toW0tIA+G+h5aKLioqis2qq6tjMwBYsmSJmd95pz2jvKysLDYL/dyhcxDatrVPkbGWmQeAl156KTZ77rnnzLbDhg2LzSorK1FXV2eviR7hkZ/IKRY/kVMsfiKnWPxETrH4iZxi8RM5xeInciqvxvkzvG8zD225HBqXtcbSQ6y164HweQITJ040c+tnr6+vN9uOHTvWzFesWGHmI0aMMPNx48bFZqFt0R999FEzD/2fWmP5tbW1ZtvQ83LkyBEzD50nUFpaGps9++yzZtuSkhIzV1WO8xNRPBY/kVMsfiKnWPxETrH4iZxi8RM5xeIncio4zi8iJQAWASgGoADKVfWPIvI8gN8AOBjddIaq/jVwX7k7qYAoA6HzRkJ1U1hYaObWOgu9evUy2x48eNDMUx3nT2XTjjMAfq+qm0WkC4BNIrI6ymar6n+k8kBElF+Cxa+q1QCqo8u1IrIdwGUt3TEialnn9Z5fRPoDGArg3B5Oj4vIVhGZLyLdYtpMFZEKEanIqKdElFUpn9svIp0BrAXw76r6vogUAziExs8BXgDQR1UfCdwH3/NTq+DhPX9KR34RKQDwHoA/qer70QPUqGqDqp4FMBfA8FTui4jyQ7D4pfFP4DwA21X1D02u79PkZhMAfJn97hFRS0llqG8kgP8GsA3AufWtZwCYBKAUjS/7dwP4bfThoHVffNlP1MJSfdl/wcznJ6JGnM9PRCYWP5FTLH4ip1j8RE6x+ImcYvETOcXiJ3KKxU/kFIufyCkWP5FTLH4ip1j8RE6x+ImcYvETOZXK6r3ZdAjAnibf94yuy0f52rd87RfAvqUrm32z9z1vIqfz+X/x4CIVqlqWWAcM+dq3fO0XwL6lK6m+8WU/kVMsfiKnki7+8oQf35KvfcvXfgHsW7oS6Vui7/mJKDlJH/mJKCGJFL+IjBGRShHZKSLPJNGHOCKyW0S2iciWpLcYi7ZBOyAiXza5rruIrBaRHdHXZrdJS6hvz4vI3
ui52yIi4xLqW4mIfCoiX4vIVyLyVHR9os+d0a9Enrecv+wXkTYA/gZgFIAqABsBTFLVr3PakRgishtAmaomPiYsIrcCOA5gkapeG103C8BhVX0x+sPZTVWn50nfngdwPOmdm6MNZfo03VkawH0A/hUJPndGvx5AAs9bEkf+4QB2quouVT0F4M8AxifQj7ynqusAHP7Z1eMBLIwuL0TjL0/OxfQtL6hqtapuji7XAji3s3Siz53Rr0QkUfyXAfh7k++rkF9bfiuAVSKySUSmJt2ZZhQ32RlpP4DiJDvTjODOzbn0s52l8+a5S2fH62zjB36/NFJVhwEYC2Ba9PI2L2nje7Z8Gq6ZA2AgGrdxqwbwcpKdiXaWfg/A71T1WNMsyeeumX4l8rwlUfx7AZQ0+b5vdF1eUNW90dcDAD5A/u0+XHNuk9To64GE+/N/8mnn5uZ2lkYePHf5tON1EsW/EcAgERkgIoUAJgJYlkA/fkFEOkUfxEBEOgEYjfzbfXgZgMnR5ckAlibYl5/Il52b43aWRsLPXd7teK2qOf8HYBwaP/H/FsCzSfQhpl//BOB/on9fJd03AEvQ+DLwNBo/G5kCoAeANQB2APgvAN3zqG+L0bib81Y0FlqfhPo2Eo0v6bcC2BL9G5f0c2f0K5HnjWf4ETnFD/yInGLxEznF4idyisVP5BSLn8gpFj+RUyx+IqdY/ERO/S+Zr01mmeRmTwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAFCxJREFUeJzt3WtslWW2B/D/4m7LrdxKaauUgkIFRFJRHDEqOiIZI0aiY8yE8dJOdCY5k8yHYzwf5MtJjDkzHhPNREbNwBEdNOMFovGaA3jBkcodgeFiwUJpuSpFwNKu86Evc6r2Xavs27vr8/8lhHb/+3Q/bVnsvfu8z7NEVUFE4emV9ASIKBksfqJAsfiJAsXiJwoUi58oUCx+okCx+IkCxeInChSLnyhQfXJ5ZyLCywmzoKKiIjbr37+/OVZEzNwbf/r0aTPfvXt3bNba2mqOpdSoqv1DjUg6l/eKyBwATwLoDeBZVX3M+XgWfxYsXbo0Nhs3bpw51ivuyspKM9+6dauZz58/PzY7cOCAOZZS093iT/lpv4j0BvA0gFsAVAG4W0SqUv18RJRb6bzmnwFgl6ruUdXvAPwNwG2ZmRYRZVs6xV8K4KtO7zdEt32PiNSKSJ2I1KVxX0SUYVn/hZ+qLgKwCOBrfqJ8ks4j/34A5Z3eL4tuI6IeIJ3iXwtggohUiEg/AL8EsDwz0yKibEt3qW8ugP9Gx1Lf86r6n87H82l/CiZPnmzmGzduTPlz9+pl///f0NBg5oWFhWbet2/f2My6PgEADh8+bObUte4u9aX1ml9V3wLwVjqfg4iSwct7iQLF4icKFIufKFAsfqJAsfiJAsXiJwpUWuv8531nXOfvUu/evc185cqVZj516tTY7OzZs+ZY7+d//PhxMy8uLjbzgoKC2OzFF180x953331mzvMAupb1Lb1E1LOx+IkCxeInChSLnyhQLH6iQLH4iQLFpb48MGbMGDPfuXOnmffpE7850/v5Njc3m3l5ebmZt7S0mPmxY8diM2+pzlvqW7VqlZmHikt9RGRi8RMFisVPFCgWP1GgWPxEgWLxEwWKxU8UqJy26P6psratdie//fbbzfzo0aNmvn379tjs2WefNcfW1NSYubeOv2vXLjMfPXp0bDZ9+nRzbG1trZkfOXLEzL/88svY7OTJk+bYEPCRnyhQLH6iQLH4iQLF4icKFIufKFAsfqJAsfiJApVui+56ACcAtAE4q6rVzsf32P3848ePj80ef/xxc6x3/LXXJvuiiy4y8/b29tjM2usPAE899ZSZe9cgLFu2zMzvuOOO2Mxq3w0A9fX1Zm79TABg//79sdmaNWvMsS+//LKZ5/IcjPOVkxbdketVlY3UiXoYPu0nClS6xa8A3hWRz0XEvhaTiPJKuk/7r1HV/SIyCsB7IrJdVVd3/oDoPwX+x0CUZ9J65FfV/dHfzQBeAzCji49ZpKrV3i8DiSi3Ui5+ESkUkUHn3gbwcwBbMjUxIsqudJ72FwN4TUTOfZ4XVfXtjMyKiLKO5/ZHBgwYYOaPPvpobHbPPfeYY6P/IGN5+/W9PfOFhYWx2YEDB8yx3jUGXovvU6dOmfnMmTNjs23btpljS0tLzXzkyJFmbu33r6ioMMfOmPGjV7Df09TUZOZJ4rn9RGRi8RMFisVPFCgWP1GgWPxEgWLxEwWKR3dHBg8ebOZFRUWx2Y4dO9K67zfeeMPMp0yZYubWEdVee2/raG0AGDRokJlXV9sXbq5duzY2u/zyy82x3txLSkrMvK6uLjbzlhGtpV0AeOihh8y8J+AjP1GgWPxEgWLxEwWKxU8UKBY/UaBY/ESBYvETBeons87vbZv1ti576
9lWS2dvS651tDYATJo0ycybm5vNvK2tLTbz1qO9ralei+7Nmzeb+XfffRebDR061By7b98+Mx8+fLiZX3311bGZtx3Y4/178+TD0d985CcKFIufKFAsfqJAsfiJAsXiJwoUi58oUCx+okD9ZNb501039cZb6+HTp083xw4ZMsTMt2/fbubTpk0z8y1b4nulNDY2mmOPHTtm5t7R3dZaOgD069cvNvOuIfC+bu9Ycuucg1GjRpljy8rKzNyTD+v4Hj7yEwWKxU8UKBY/UaBY/ESBYvETBYrFTxQoFj9RoNx1fhF5HsAvADSr6uTotmEAlgEYC6AewJ2qai8YJ8xabwaAqVOnmvlNN90Um+3du9ccO3v2bDNvaGhIK7fOr/fOKfDOxt+0aZOZnz592sw3bNgQm3n9CLz9/t546zqAJUuWmGPLy8vNPN3zI/JBdx75/wpgzg9uexjAB6o6AcAH0ftE1IO4xa+qqwH88Kia2wAsjt5eDGBehudFRFmW6mv+YlU9d93oQQDFGZoPEeVI2tf2q6qKSOwLHBGpBVCb7v0QUWal+sjfJCIlABD9HXvCpKouUtVqVbU7OhJRTqVa/MsBLIjeXgDAbjNLRHnHLX4ReQnAGgCXiEiDiNwP4DEAN4nITgA3Ru8TUQ/ivuZX1btjInvxOgXZXDu1zo8HgAsvvNDM33nnndhs/Pjx5tjevXunlQ8bNszMrfXsvn37mmO96wAqKirM3Pv81157bWx24sQJc+yll15q5rt27TJz67yAmTNnmmO9fw+jR482c++sgXzAK/yIAsXiJwoUi58oUCx+okCx+IkCxeInClROj+4WEXNpyGtl7R0jbfGW07yWzdZy3scff2yOveqqq8zcO6L6zJkzZv7JJ5/EZqdOnTLHtra2mrmnoKDAzMeOHRubedusBwwYYOaHDh0yc+tn6i0br1q1ysx/CvjITxQoFj9RoFj8RIFi8RMFisVPFCgWP1GgWPxEgcr5On+fPvF36W3pbWtri828ddv+/fubuXdMdGFhYWx2xRVXmGO9awy89ew9e/aY+YQJE2Izb+up1cYasI8sB/wW359++mlsNnHiRHOst13Ya42+e/fu2Oyrr74yx1ZVVZm51+I7m1t6rRo6n2th+MhPFCgWP1GgWPxEgWLxEwWKxU8UKBY/UaBY/ESByuk6P2CvUWbz6G5vrf2LL75IefzmzZvNsVYLbcBvwT18+HAzv/jii83c4u2pP378uJk3NjaauXV9hXdOgfd119fXm/k333wTmzU3xzaZAgBUVlaa+Y033mjmVmvydFnHrVtf8w/xkZ8oUCx+okCx+IkCxeInChSLnyhQLH6iQLH4iQLlrvOLyPMAfgGgWVUnR7ctBFAD4NzB6Y+o6lve52pvbzfXIcvKyszx1rqvty/da9F9ww03mLm15/7dd981x1pn1wP+2frWnngAuOSSS2Kzbdu2mWO9swS8+/bah1vXdaxfv94ca51TAAArVqwwc6s9+Lx588yx3nUf1teVbd4ZCt3VnUf+vwKY08XtT6jqtOiPW/hElF/c4lfV1QCO5mAuRJRD6bzm/52IbBKR50WkKGMzIqKcSLX4/wygEsA0AI0A/hj3gSJSKyJ1IlKX4n0RURakVPyq2qSqbaraDuAvAGYYH7tIVatVtTrVSRJR5qVU/CLSeZva7QC2ZGY6RJQr3VnqewnAdQBGiEgDgEcBXCci0wAogHoAv8niHIkoC9ziV9W7u7j5uSzMBU1NTWZunUnu7fUvLi4284MHD5r5rFmzYjPvjHZvv7+3H7+goMDMra/dW4f3egJ4Z+ePGDHCzC0XXHCBmXt74ltbW83c+vfinbGwdOlSM+/Vy37SnM2zKaz7bm9v7/7nSXkGRNSjsfiJAsXiJwoUi58oUCx+okCx+IkCldy+xC54y0oWb9mnvLzczI8etfcuffjhh7GZt/X0yJEjZu610faOkbaO1/7222/NsYMHDzbzgQMHmnlpaamZW
1upd+zYYY71WnCPGTPGzK0t4t5SnLUdGABWrlxp5uks5XnOZznPwkd+okCx+IkCxeInChSLnyhQLH6iQLH4iQLF4icKVE7X+YuKiszWxm+++aY53lpz9o4zXrdunZnfe++9Zv7ZZ5/FZlVVVeZY7+jujRs3mrl3/Pa4ceNiM2+d32tdfujQITP3tt1aPzPvOHWrvTcAjBo1ysytI829VtZea3JvS3BPwEd+okCx+IkCxeInChSLnyhQLH6iQLH4iQLF4icKVE7X+Y8dO4ZXXnklNvfaHlt7pL392UOHDjVzb0++dR6At1burUcXFhaa+ZAhQ8zcOg/go48+Msda110AwOHDh83cW4u3jkxvaWkxx44cOdLMvbbqdXXxHeJOnjxpjvVavk+ZMsXMs3l0d6bwkZ8oUCx+okCx+IkCxeInChSLnyhQLH6iQLH4iQLlrvOLSDmAJQCKASiARar6pIgMA7AMwFgA9QDuVFV7U73DaqkMAP369YvNvHXTiRMnmvkTTzxh5rNnz47NvHV4rz249XUBQH19vZnv3bs3NvOuX2hubjZzb51/8uTJZm71YvDWwpcvX27mp06dMvO2trbYzGvJXlNTY+Zvv/22mf9U1vnPAviDqlYBuArAb0WkCsDDAD5Q1QkAPojeJ6Iewi1+VW1U1XXR2ycAbANQCuA2AIujD1sMYF62JklEmXder/lFZCyAywH8A0CxqjZG0UF0vCwgoh6i29f2i8hAAH8H8HtV/abzaxpVVRHp8kWMiNQCqE13okSUWd165BeRvugo/KWq+mp0c5OIlER5CYAuf3OkqotUtVpVqzMxYSLKDLf4peMh/jkA21T1T52i5QAWRG8vAPBG5qdHRNnSnaf9PwPwKwCbReTcOc2PAHgMwMsicj+AvQDuzM4U/9+ZM2dSHturl/3/3C233GLmV155ZWzmtfceMWKEma9evdrMva3OBQUFsZl3BLX1dQHAvn37zNxbKrS2O6fbJnvTpk1mbi0FDh8+3BzrbVW+7rrrzDwflvI8bvGr6kcA4n5K8YvfRJTXeIUfUaBY/ESBYvETBYrFTxQoFj9RoFj8RIHK6dHd6bK2aHq8Lb3e0d4nTpyIzZ555hlz7MKFC8186tSpKd83YK+X79mzxxy7c+dOM/dan+/evdvMrbV2r324t47vtQ+3jt/2rn+YNGmSmb///vtm3hPW+fnITxQoFj9RoFj8RIFi8RMFisVPFCgWP1GgWPxEgepR6/zpeOGFF8zcW3N+4IEHYrOvv/7aHPvee++Z+c0332zm3jUIgwYNis28Y7+rqqrM3DtrwFsPt/bze0eeDxgwwMy9cxIOHDgQmz344IPmWO/fg9UWHfDPA0jnbIpM4SM/UaBY/ESBYvETBYrFTxQoFj9RoFj8RIFi8RMFKph1/rvuusvM58yZY+Zbt26NzVpbW82xRUVFZj548GAz91qXW+2mKyoqzLEtLS1m7p2hsH37djOfMmVKbNbe3m6Otdp7A36b7MrKythszZo15livbbp37r/XJyIf5P8MiSgrWPxEgWLxEwWKxU8UKBY/UaBY/ESBYvETBcpd5xeRcgBLABQDUACLVPVJEVkIoAbAucPTH1HVt9KZTGFhoZmfPHky5c+9YsUKMy8rKzPzWbNmxWYlJSXm2JqaGjOfMWOGmQ8bNszMrbX0VatWmWPnz59v5t56uHWWgHf/Y8aMMcd6P+9XX33VzK3v66233mqO9XopHD161Mz79Mn/S2i6M8OzAP6gqutEZBCAz0Xk3OkUT6jqf2VvekSULW7xq2ojgMbo7RMisg1AabYnRkTZdV6v+UVkLIDLAfwjuul3IrJJRJ4XkS6vYRWRWhGpE5G6tGZKRBnV7eIXkYEA/g7g96r6DYA/A6gEMA0dzwz+2NU4VV2kqtWqWp2B+RJRhnSr+EWkLzoKf6mqvgoAqtqkqm2q2g7gLwDs31oRUV5xi186WsA+B2Cbqv6p0+2df8V9O4AtmZ8eE
WVLd37b/zMAvwKwWUQ2RLc9AuBuEZmGjuW/egC/SXcy3tKO1Yraa4lstYoGgPXr15v53LlzYzNvifL06dNm7h2P7Xn99ddTHpvufecza5nysssuM8eOHDnSzFeuXGnm6bSTz5Xu/Lb/IwBdVV1aa/pElCxe4UcUKBY/UaBY/ESBYvETBYrFTxQoFj9RoPJ/32En3lq+pbTU3ot0/fXXm/nTTz8dm1mtoCk51vHb3nHoAwcONHPvyHLvupJ8wEd+okCx+IkCxeInChSLnyhQLH6iQLH4iQLF4icKlKSzdn7edyZyCMDeTjeNAHA4ZxM4P/k6t3ydF8C5pSqTc7tIVe3DCCI5Lf4f3blIXb6e7Zevc8vXeQGcW6qSmhuf9hMFisVPFKiki39Rwvdvyde55eu8AM4tVYnMLdHX/ESUnKQf+YkoIYkUv4jMEZEdIrJLRB5OYg5xRKReRDaLyIakW4xFbdCaRWRLp9uGich7IrIz+rvLNmkJzW2hiOyPvncbRCT+vPPszq1cRP5XRL4Qka0i8m/R7Yl+74x5JfJ9y/nTfhHpDeCfAG4C0ABgLYC7VfWLnE4khojUA6hW1cTXhEXkWgAtAJao6uTotscBHFXVx6L/OItU9d/zZG4LAbQk3bk5aihT0rmzNIB5AH6NBL93xrzuRALftyQe+WcA2KWqe1T1OwB/A3BbAvPIe6q6GsAPG8HfBmBx9PZidPzjybmYueUFVW1U1XXR2ycAnOssnej3zphXIpIo/lIAX3V6vwH51fJbAbwrIp+LSG3Sk+lCcdQ2HQAOAihOcjJdcDs359IPOkvnzfculY7XmcZf+P3YNao6HcAtAH4bPb3NS9rxmi2flmu61bk5V7roLP0vSX7vUu14nWlJFP9+AOWd3i+LbssLqro/+rsZwGvIv+7DTeeapEZ/Nyc8n3/Jp87NXXWWRh587/Kp43USxb8WwAQRqRCRfgB+CWB5AvP4EREpjH4RAxEpBPBz5F/34eUAFkRvLwDwRoJz+Z586dwc11kaCX/v8q7jtarm/A+Auej4jf9uAP+RxBxi5jUOwMboz9ak5wbgJXQ8DWxFx+9G7gcwHMAHAHYCeB/AsDya2/8A2AxgEzoKrSShuV2Djqf0mwBsiP7MTfp7Z8wrke8br/AjChR/4UcUKBY/UaBY/ESBYvETBYrFTxQoFj9RoFj8RIFi8RMF6v8AkzGpsoyxT5AAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAERtJREFUeJzt3XuMlFWax/HfI3QjoKCIQIMoXggyARVCyKq4XlaMt6ATjQ4aw2ZH25gxcXT/WOP+sSabjZONM5vln0mYKMOsM+Ia7wo7umYD44WBRl1UdBAVBW1uoshFbs2zf3SxabXf57TdVfUWnO8n6dBdT5+qQ3X/+n2rznvOMXcXgPwcVXYHAJSD8AOZIvxApgg/kCnCD2SK8AOZIvxApgg/kCnCD2Sqfz0fzMy4nBCoMXe3nnxfn478Zna5mf3FzNaa2b19uS8A9WW9vbbfzPpJWiNppqQNklZImu3uq4M2HPmBGqvHkX+6pLXu/pG775O0UNI1fbg/AHXUl/CPkbS+y9cbKrd9i5m1mlmbmbX14bEAVFnN3/Bz93mS5kmc9gONpC9H/s8kje3y9UmV2wAcBvoS/hWSxpvZqWbWLOknkp6tTrcA1FqvT/vd/YCZ3Snpj5L6SXrY3d+tWs8A1FSvh/p69WC85gdqri4X+QA4fBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTPV6i25JMrN1knZI6pB0wN2nVaNTAGqvT+GvuNjdt1bhfgDUEaf9QKb6Gn6X9KKZrTSz1mp0CEB99PW0f4a7f2ZmIyS9ZGbvu/vSrt9Q+aPAHwagwZi7V+eOzO6XtNPdHwy+pzoPBqCQu1tPvq/Xp/1mNtjMjj30uaTLJL3T2/sDUF99Oe0fKekpMzt0P39w9/+qSq8A1FzVTvt79GCc9h9xhg8fHtbPPffcwtrixYvDth0dHWG9cuApFP1up37v+3LfktTU1BTWDxw4UFhrbm4O2+7duzes1/y0H8DhjfADmSL8QKYIP5Apwg9kivADmarGrD4cwS6++OKw/sADD4T17du3F9bGjRsXth0xYkRYHzJkSFifP39+Ya1///hXf/fu3WE9JRrilKSpU6cW1tauXRu2nTt3bmEtNTzaFUd+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyxTh/5lJTVwcNGhTWR48eHdZHjRpVWJs8eXLY9qij4mPTsmXLwvqAAQMKa6n/9z333BPWJ06cGNaXL18e1m+55ZbC2pw5c8K2EyZMKKx99NFHYduuOPIDmSL8QKYIP5Apwg9kivADmSL8QKYIP5Apxvkzl1qC+s033wzrS5YsCevnnXdeYe31118P25566qlhfdasWWF90aJFhbXNmzeHbb/55puwftddd4X1xx9/PKzv2rWrsHbccceFbTdt2lRY279/f9i2K478QKYIP5Apwg9kivADmSL8QKYIP5Apwg9kKjnOb2YPS7pa0mZ3n1S5bZikxySNk7RO0g3u/mXtuomyRGPKUnqcf8aMGYW1lpaWsG1qzvy+ffvC+rXXXltYGz9+fNj2ww8/DOs333xzWF+zZk1YHzt2bGEtNc7/1VdfFdaqvW7/byVd/p3b7pX0sruPl/Ry5WsAh5Fk+N19qaRt37n5GkkLKp8vkFT8JxZAQ+rta/6R7t5e+XyjpJFV6g+AOunztf3u7mZWeIG4mbVKau3r4wCort4e+TeZWYskVf4tnCXh7vPcfZq7T+vlYwGogd6G/
1lJh5YYnSPpmep0B0C9JMNvZo9Kel3SBDPbYGY/lfQLSTPN7ANJl1a+BnAYsdR87qo+WPDeAMrRr1+/sB6NlUvxXvGStH379sJaas58asy6ubk5rEfXEezduzds+9xzz4X1E088Maynro847bTTCms7d+4M2956662FtV27dqmjoyPelKCCK/yATBF+IFOEH8gU4QcyRfiBTBF+IFMs3X2Ei7apltJbcLe1tYX11atXh/X169cX1k4++eSw7VNPPRXWhw8fHtYXLFhQWLvxxhvDtlOnTg3r7e3tYf3oo48O65MmTSqsvfLKK2HbaIj04MGDYduuOPIDmSL8QKYIP5Apwg9kivADmSL8QKYIP5ApxvkPA2Y9mqHZrf794x9xatpsahnpaCxdisfTP/7447Dt6NGjw/r7778f1keNGlVYS23/feyxx4b1PXv2hPWnn346rA8dOrSwNnJkvCRmtabhc+QHMkX4gUwRfiBThB/IFOEHMkX4gUwRfiBTjPM3gDFjxoT1aDtnSTr++OMLa/v37w/brlixIqynltduamoK69H88ldffTVsO2HChLB+1FHxsWvWrFmFtR07doRtN27cGNZT6yScddZZYT3aIvyLL74I20bXbjCfH0AS4QcyRfiBTBF+IFOEH8gU4QcyRfiBTCXH+c3sYUlXS9rs7pMqt90v6TZJWyrfdp+7L6pVJxtBNKacWj/+sssuC+vXXXddWN+yZUtYj7Z0Xrx4cdh2/vz5YT21lfWyZcvCenSdQWqs/IQTTgjrV199dVg/++yzC2uLFsW/rqmty1N7DqT+b9FaBak9A6K1CNatWxe27aonR/7fSrq8m9v/zd3PqXwc0cEHjkTJ8Lv7Uknb6tAXAHXUl9f8d5rZKjN72MyKry8F0JB6G/5fSzpd0jmS2iX9sugbzazVzNrMLN70DUBd9Sr87r7J3Tvc/aCk30iaHnzvPHef5u7TettJANXXq/CbWUuXL38s6Z3qdAdAvfRkqO9RSRdJGm5mGyT9k6SLzOwcSS5pnaTba9hHADWQDL+7z+7m5odq0JekgQMHFtaGDBkStk2tT3/MMceE9dmzu3saOl1//fVh2+bm5rCe2sv9xRdfDOvRNQhz584N227bFg/kLF++PKyn1piP5uyn5uOn1gpIra3f3t5eWBs2bFjY9vzzzw/rBw4cCOuPPPJIWI+et08++SRsO2XKlMLa5s2bw7ZdcYUfkCnCD2SK8AOZIvxApgg/kCnCD2Sqrkt3m1k4fDN58uSwfbQUc1tbfPVwa2trWE9Ny42WsG5paSmsSdJrr70W1l966aWwfumll4b1qO+pLbpT04VTP5PUsuNLly4trI0bNy5sm1o2/PTTTw/ru3fvLqxt3749bLtw4cKwftttt4X11PDvl19+WVi74oorwrZ9+Xl3xZEfyBThBzJF+IFMEX4gU4QfyBThBzJF+IFMmbvX7cH69+/vQ4cOLawvWLAgbH/RRRcV1t56662wbWpJ4xdeeCGsm1lh7fbb4+UMVq9eHdZTS3enpr5G00s3bNgQtk1NJz7zzDPD+tdffx3Wo593aupqtPW4lJ7SG42l79mzJ2ybmuKdmm68fv36sB5tAZ5auvvCCy8srK1Zs0a7d+8u/mXtgiM/kCnCD2SK8AOZIvxApgg/kCnCD2SK8AOZqut8/qamJo0YMaKwfvfdd4ftTzrppMJaNHdbkk455ZSwnrre4Y477iisjR07Nmz75JNPhvWVK1eG9a1bt4b1iRMnFtY+/fTTsG1qrYGZM2eG9dR1BM8//3xhbe3atWHbHTt2hPWbbroprEfLhkfj7D157NTPfM2aNWF90qRJhbXU7+rnn39eWIu2RP8ujvxApgg/kCnCD2SK8AOZIvxApgg/kCnCD2QqOZ/fzMZK+p2kkZJc0jx3/3czGybpMUnjJK2TdIO7F0+gltTc3OzRmuOpOdbROu2p8c2dO3eG9dTWxtEa8hMmTOjTfafWWk9tBx2tQ
Z+al96vX7+wHs2Jl9LXRxw8eLCwlpoTP2jQoLCe2mY7WoMhqknp60ZSv6sDBgwI69EeFKnrH6ZPn15Ymz9/vtrb26s2n/+ApL939x9J+itJPzOzH0m6V9LL7j5e0suVrwEcJpLhd/d2d3+j8vkOSe9JGiPpGkmHlt5ZIOnaWnUSQPX9oNf8ZjZO0hRJf5Y00t3bK6WN6nxZAOAw0ePwm9kxkp6Q9HN3/9bCbd75wq/bF39m1mpmbWbWFr3+A1BfPQq/mTWpM/i/d/dDs1Q2mVlLpd4iqdt3tdx9nrtPc/dpqYUoAdRPMo3W+bboQ5Lec/dfdSk9K2lO5fM5kp6pfvcA1EpPhvpmSPqTpLclHTpvv0+dr/v/U9LJkj5R51DftsR9hQ+WWiZ68ODBhbULLrggbHvJJZeE9eXLl4f1aNhp7969YdvU8thLliwJ66mtqgcOHNirmiRdddVVYf3BBx8M61OmTAnr0VDiGWecEbZNPa+prdFXrVpVWNu1a1fYNjXUFw3VSempztFy7qkhzscee6ywtmfPHnV0dPRoqC85n9/dX5FUdGd/05MHAdB4eBEOZIrwA5ki/ECmCD+QKcIPZIrwA5mq6xbdqXH+RhaNV/f1OUxtNZ1aRjqSmrI7ZMiQsJ7q26ZNm8L6vn37Cmupy71TU3ZT1wFE07yjqeWStGXLlrCeGotPTflN1fvC3dmiG0Axwg9kivADmSL8QKYIP5Apwg9kivADmWKcHzjCMM4PIET4gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwRfiBTyfCb2Vgz+x8zW21m75rZXZXb7zezz8zsrcrHlbXvLoBqSS7mYWYtklrc/Q0zO1bSSknXSrpB0k53f7DHD8ZiHkDN9XQxj/49uKN2Se2Vz3eY2XuSxvStewDK9oNe85vZOElTJP25ctOdZrbKzB42s+ML2rSaWZuZtfWppwCqqsdr+JnZMZKWSPoXd3/SzEZK2irJJf2zOl8a/F3iPjjtB2qsp6f9PQq/mTVJel7SH939V93Ux0l63t0nJe6H8AM1VrUFPM3MJD0k6b2uwa+8EXjIjyW980M7CaA8PXm3f4akP0l6W9KhPZXvkzRb0jnqPO1fJ+n2ypuD0X1x5AdqrKqn/dVC+IHaY91+ACHCD2SK8AOZIvxApgg/kCnCD2SK8AOZIvxApgg/kCnCD2SK8AOZIvxApgg/kCnCD2QquYBnlW2V9EmXr4dXbmtEjdq3Ru2XRN96q5p9O6Wn31jX+fzfe3CzNnefVloHAo3at0btl0TfequsvnHaD2SK8AOZKjv880p+/Eij9q1R+yXRt94qpW+lvuYHUJ6yj/wASlJK+M3scjP7i5mtNbN7y+hDETNbZ2ZvV3YeLnWLsco2aJvN7J0utw0zs5fM7IPKv91uk1ZS3xpi5+ZgZ+lSn7tG2/G67qf9ZtZP0hpJMyVtkLRC0mx3X13XjhQws3WSprl76WPCZvbXknZK+t2h3ZDM7F8lbXP3X1T+cB7v7v/QIH27Xz9w5+Ya9a1oZ+m/VYnPXTV3vK6GMo780yWtdfeP3H2fpIWSrimhHw3P3ZdK2vadm6+RtKDy+QJ1/vLUXUHfGoK7t7v7G5XPd0g6tLN0qc9d0K9SlBH+MZLWd/l6gxpry2+X9KKZrTSz1rI7042RXXZG2ihpZJmd6UZy5+Z6+s7O0g3z3PVmx+tq4w2/75vh7lMlXSHpZ5XT24bkna/ZGmm45teSTlfnNm7tkn5ZZmcqO0s/Ienn7v5111qZz103/SrleSsj/J9JGtvl65MqtzUEd/+s8u9mSU+p82VKI9l0aJPUyr+bS+7P/3P3Te7e4e4HJf1GJT53lZ2ln5D0e3d/snJz6c9dd/0q63krI/wrJI03s1PNrFnSTyQ9W0I/vsfMBlfeiJGZDZZ0mRpv9+FnJc2pfD5H0jMl9uVbGmXn5qKdpVXyc9dwO167e90/J
F2pznf8P5T0j2X0oaBfp0n638rHu2X3TdKj6jwN3K/O90Z+KukESS9L+kDSf0sa1kB9+w917ua8Sp1BaympbzPUeUq/StJblY8ry37ugn6V8rxxhR+QKd7wAzJF+IFMEX4gU4QfyBThBzJF+IFMEX4gU4QfyNT/AeY1k3WNldveAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "z = torch.randn(BATCH_SIZE, 64).to(DEVICE)\n", + "fake_images = G(z)\n", + "for i in range(10):\n", + " fake_images_img = np.reshape(fake_images.data.cpu().numpy()[i],(28, 28))\n", + " plt.imshow(fake_images_img, cmap = 'gray')\n", + " plt.show()" ] } ], diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.py" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.py" index 3d2a977..6aa8a87 100644 --- "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.py" +++ "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan.py" @@ -1,12 +1,10 @@ #!/usr/bin/env python # coding: utf-8 -# # 9.1 GAN으로 새로운 패션아이템 생성하기 +# # GAN으로 새로운 패션아이템 생성하기 # *GAN을 이용하여 새로운 패션 아이템을 만들어봅니다* -# GAN을 구현하기 위해 그 구조를 더 자세히 알아보겠습니다. -# GAN은 생성자(Generator)와 판별자(Discriminator) 2개의 신경망으로 -# 이루어져 있습니다. -# ## GAN 구현하기 +# 이 프로젝트는 최윤제님의 파이토치 튜토리얼 사용 허락을 받아 참고했습니다. +# * [yunjey/pytorch-tutorial](https://github.com/yunjey/pytorch-tutorial) - MIT License import os import torch @@ -15,20 +13,23 @@ import torch.optim as optim from torchvision import transforms, datasets from torchvision.utils import save_image +import matplotlib.pyplot as plt +import numpy as np -torch.manual_seed(1) # reproducible - +# EPOCHS 과 BATCH_SIZE 등 학습에 필요한 하이퍼 파라미터 들을 설정해 줍니다. -# Hyper Parameters -EPOCHS = 100 +# 하이퍼파라미터 +EPOCHS = 500 BATCH_SIZE = 100 USE_CUDA = torch.cuda.is_available() DEVICE = torch.device("cuda" if USE_CUDA else "cpu") print("Using Device:", DEVICE) -# Fashion MNIST digits dataset +# 학습에 필요한 데이터셋을 로딩합니다. 
+ +# Fashion MNIST 데이터셋 trainset = datasets.FashionMNIST('./.data', train=True, download=True, @@ -42,17 +43,9 @@ shuffle = True) -# Discriminator -D = nn.Sequential( - nn.Linear(784, 256), - nn.LeakyReLU(0.2), - nn.Linear(256, 256), - nn.LeakyReLU(0.2), - nn.Linear(256, 1), - nn.Sigmoid()) - +# 생성자는 64차원의 랜덤한 텐서를 입력받아 이에 행렬곱(Linear)과 활성화 함수(ReLU, Tanh) 연산을 실행합니다. 생성자의 결과값은 784차원, 즉 Fashion MNIST 속의 이미지와 같은 차원의 텐서입니다. -# Generator +# 생성자 (Generator) G = nn.Sequential( nn.Linear(64, 256), nn.ReLU(), @@ -62,71 +55,85 @@ nn.Tanh()) +# 판별자는 784차원의 텐서를 입력받습니다. 판별자 역시 입력된 데이터에 행렬곱과 활성화 함수를 실행시키지만, 생성자와 달리 판별자의 결과값은 입력받은 텐서가 진짜인지 구분하는 예측값입니다. + +# 판별자 (Discriminator) +D = nn.Sequential( + nn.Linear(784, 256), + nn.LeakyReLU(0.2), + nn.Linear(256, 256), + nn.LeakyReLU(0.2), + nn.Linear(256, 1), + nn.Sigmoid()) + + +# 생성자와 판별자 학습에 쓰일 오차 함수와 최적화 알고리즘도 정의해 줍니다. -# Device setting +# 모델의 가중치를 지정한 장치로 보내기 D = D.to(DEVICE) G = G.to(DEVICE) -# Binary cross entropy loss and optimizer +# 이진 크로스 엔트로피 (Binary cross entropy) 오차 함수와 +# 생성자와 판별자를 최적화할 Adam 모듈 criterion = nn.BCELoss() d_optimizer = optim.Adam(D.parameters(), lr=0.0002) g_optimizer = optim.Adam(G.parameters(), lr=0.0002) +# 모델 학습에 필요한 준비는 끝났습니다. 그럼 본격적으로 GAN을 학습시키는 loop을 만들어 보겠습니다. 
+ total_step = len(train_loader) for epoch in range(EPOCHS): for i, (images, _) in enumerate(train_loader): images = images.reshape(BATCH_SIZE, -1).to(DEVICE) - # Create the labels which are later used as input for the BCE loss + # '진짜'와 '가짜' 레이블 생성 real_labels = torch.ones(BATCH_SIZE, 1).to(DEVICE) fake_labels = torch.zeros(BATCH_SIZE, 1).to(DEVICE) - - # Train Discriminator - - # Compute BCE_Loss using real images where BCE_Loss(x, y): - y * log(D(x)) - (1-y) * log(1 - D(x)) - # Second term of the loss is always zero since real_labels == 1 + + # 판별자가 진짜 이미지를 진짜로 인식하는 오차를 예산 outputs = D(images) d_loss_real = criterion(outputs, real_labels) real_score = outputs - # Compute BCELoss using fake images - # First term of the loss is always zero since fake_labels == 0 + # 무작위 텐서로 가짜 이미지 생성 z = torch.randn(BATCH_SIZE, 64).to(DEVICE) fake_images = G(z) + + # 판별자가 가짜 이미지를 가짜로 인식하는 오차를 계산 outputs = D(fake_images) d_loss_fake = criterion(outputs, fake_labels) fake_score = outputs - # Backprop and optimize + # 진짜와 가짜 이미지를 갖고 낸 오차를 더해서 판별자의 오차를 계산 후 학습 d_loss = d_loss_real + d_loss_fake d_optimizer.zero_grad() d_loss.backward() d_optimizer.step() - # Train Generator - - # Compute loss with fake images - z = torch.randn(BATCH_SIZE, 64).to(DEVICE) + # 생성자가 판별자를 속였는지에 대한 오차를 계산 fake_images = G(z) outputs = D(fake_images) - - # We train G to maximize log(D(G(z)) instead of minimizing log(1-D(G(z))) - # For the reason, see the last paragraph of section 3. 
https://arxiv.org/pdf/1406.2661.pdf g_loss = criterion(outputs, real_labels) - # Backprop and optimize + # 역전파 알고리즘으로 생성자 모델의 학습을 진행 d_optimizer.zero_grad() g_optimizer.zero_grad() g_loss.backward() g_optimizer.step() - if (i+1) % 200 == 0: - print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' - .format(epoch, EPOCHS, i+1, total_step, d_loss.item(), g_loss.item(), - real_score.mean().item(), fake_score.mean().item())) + # 학습 진행 알아보기 + print('Epoch [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' + .format(epoch, EPOCHS, d_loss.item(), g_loss.item(), + real_score.mean().item(), fake_score.mean().item())) -# ## 참고 -# 본 튜토리얼은 다음 자료를 참고하여 만들어졌습니다. -# * [yunjey/pytorch-tutorial](https://github.com/yunjey/pytorch-tutorial) - MIT License +# 학습이 끝난 생성자의 결과물을 한번 확인해 보겠습니다. + +z = torch.randn(BATCH_SIZE, 64).to(DEVICE) +fake_images = G(z) +for i in range(10): + fake_images_img = np.reshape(fake_images.data.cpu().numpy()[i],(28, 28)) + plt.imshow(fake_images_img, cmap = 'gray') + plt.show() + diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan_explanation.ipynb" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan_explanation.ipynb" deleted file mode 100644 index bf6337e..0000000 --- "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan_explanation.ipynb" +++ /dev/null @@ -1,664 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 9.1 GAN으로 새로운 패션아이템 생성하기\n", - "*GAN을 이용하여 새로운 패션 아이템을 만들어봅니다*\n", - "\n", - "GAN을 구현하기 위해 그 구조를 더 자세히 알아보겠습니다.\n", - "GAN은 생성자(Generator)와 판별자(Discriminator) 2개의 신경망으로 이루어져 있습니다.\n", - "생성자는 실제 데이터와 비슷한 가짜 데이터를 만들어냅니다. 생성자가 만든 가짜 데이터는 '가짜' 라는 레이블을 부여받고 \n", - "Fashion MNIST의 이미지와 같은 '진짜' 데이터와 함께 판별자에 입력됩니다. \n", - "그러면 판별자는 진짜와 가짜 데이터를 구분하는 능력을 학습합니다. 
여기서 재밌는점은 판별자가 가짜와 진짜를 제대로 분류할 때 마다 생성자에 대한 페널티는 늘어난다는 것입니다.\n", - "그러므로 생성자는 판별자가 좋은 퍼포먼스를 내는것을 방해하기 위해 더 진짜 데이터와 비슷한 데이터를 생성하게 됩니다.\n", - "이처럼 GAN은 이름 그대로 판별자와 생성자의 경쟁을 통해서 학습하는 모델입니다." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## GAN 구현하기" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "지금까지 해온 것 처럼 구현에 필요한 라이브러리들을 임포트합니다." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import torch\n", - "import torchvision\n", - "import torch.nn as nn\n", - "import torch.optim as optim\n", - "from torchvision import transforms, datasets\n", - "from torchvision.utils import save_image\n", - "import matplotlib.pyplot as plt" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "생성자는 랜덤한 텐서를 입력받아 기존 데이터와 비슷한 데이터를 창작하는 '신경망' 입니다. 그러므로 생성자에 입력되는 랜덤 텐서가 어떻게 설정되느냐에 따라 같은 코드라도 결과물과 퍼포먼스 근소하게 달라질 여지가 있습니다. 그러므로 여러분들이 직접 이 책의 GAN 코드를 보면서 구현한 결과와 책에서 보여주는 결과를 최대한 비슷하게 만들어주기 위해 학습 도중 생성되는 모든 랜덤한 값을 동일하게 설정해 주겠습니다." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "torch.manual_seed(1) # reproducible" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "EPOCHS 과 BATCH_SIZE 등 학습에 필요한 하이퍼 파라미터 들을 설정해 줍니다." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Using Device: -1\n" - ] - } - ], - "source": [ - "# Hyper Parameters\n", - "EPOCHS = 100\n", - "BATCH_SIZE = 100\n", - "USE_CDA = torch.cuda.is_available()\n", - "DEVICE = -1#torch.device(\"cuda\" if USE_CUDA else \"cpu\")\n", - "print(\"Using Device:\", DEVICE)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "학습에 필요한 데이터셋을 로딩합니다. 
" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "# Fashion MNIST digits dataset\n", - "trainset = datasets.FashionMNIST('./.data',\n", - " train=True,\n", - " download=True,\n", - " transform=transforms.Compose([\n", - " transforms.ToTensor(),\n", - " transforms.Normalize((0.5,), (0.5,))\n", - " ]))\n", - "train_loader = torch.utils.data.DataLoader(\n", - " dataset = trainset,\n", - " batch_size = BATCH_SIZE,\n", - " shuffle = True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "데이터의 로딩이 끝났으면 GAN의 생성자와 판별자를 구현합니다. \n", - "지금까지는 신경망 모델들을 파이썬의 객체로써 정의해 주었습니다. 그렇게 함으로써 신경망의 복잡한 기능과 동작들을 함수의 형태로 편리하게 정의해 줄 수 있었습니다.\n", - "그러나 이번 예제에서 구현할 생성자와 판별자는 비교적 단순한 신경망이므로, 좀 더 간소한 방법을 이용해 정의해 보겠습니다. \n", - "Pytorch가 제공하는 Sequential 자료구조는 신경망의 forward() 동작에 필요한 동작들을 입력받아 이들을 차례대로 실행시키는 신경망 구조체를 만들어 줍니다.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "생성자는 64차원의 랜덤한 텐서를 입력받아 이에 행렬곱(Linear)과 활성화 함수(ReLU, Tanh) 연산을 실행합니다. 생성자의 결과값은 784차원, 즉 Fashion MNIST 속의 이미지와 같은 차원의 텐서입니다." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "# Generator \n", - "G = nn.Sequential(\n", - " nn.Linear(64, 256),\n", - " nn.ReLU(),\n", - " nn.Linear(256, 256),\n", - " nn.ReLU(),\n", - " nn.Linear(256, 784),\n", - " nn.Tanh())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "판별자는 784차원의 텐서를 입력받습니다. 판별자 역시 입력된 데이터에 행렬곱과 활성화 함수를 실행시키지만, 생성자와 달리 판별자의 결과값은 입력받은 텐서가 진짜 Fashion MNIST 데이터일 확률값입니다." 
- ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# Discriminator\n", - "D = nn.Sequential(\n", - " nn.Linear(784, 256),\n", - " nn.LeakyReLU(0.2),\n", - " nn.Linear(256, 256),\n", - " nn.LeakyReLU(0.2),\n", - " nn.Linear(256, 1),\n", - " nn.Sigmoid())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "생성자와 판별자 학습에 쓰일 오차 함수와 최적화 알고리즘도 정의해 줍니다." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# Device setting\n", - "# D = D.to(DEVICE)\n", - "# G = G.to(DEVICE)\n", - "\n", - "# Binary cross entropy loss and optimizer\n", - "criterion = nn.BCELoss()\n", - "d_optimizer = optim.Adam(D.parameters(), lr=0.0002)\n", - "g_optimizer = optim.Adam(G.parameters(), lr=0.0002)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "모델 학습에 필요한 준비는 끝났습니다. 그럼 본격적으로 GAN을 학습시키는 loop을 만들어 보겠습니다. " - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "total_step = len(train_loader)\n", - "for epoch in range(EPOCHS):\n", - " for i, (images, _) in enumerate(train_loader):\n", - " images = images.reshape(BATCH_SIZE, -1)#.to(-1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "데이터셋 속의 진짜 이미지에는 '진짜' 라는 레이블을, 반대로 생성자가 만든 이미지에는 '가짜'라는 레이블링을 해 줘야 합니다. 이 두 레이블을 나타내는 레이블 텐서를 정의해 줍니다." - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - " real_labels = torch.ones(BATCH_SIZE, 1)#.to(-1)\n", - " fake_labels = torch.zeros(BATCH_SIZE, 1)#.to(-1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "판별자는 실제 이미지를 보고 '진짜'라고 구분짓는 능력을 학습해야 합니다. 그러기 위해선 실제 이미지를 판별자 신경망에 입력시켜 얻어낸 결과값과 '진짜' 레이블 간의 오차값을 계산해야 합니다." 
- ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - " outputs = D(images)\n", - " d_loss_real = criterion(outputs, real_labels)\n", - " real_score = outputs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "다음으로는 생성자의 동작을 정의합니다. 생성자는 무작위한 텐서를 입력받아 실제 이미지와 같은 차원의 텐서를 배출해야합니다." - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - " z = torch.randn(BATCH_SIZE, 64)#.to(-1)\n", - " fake_images = G(z)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "생성자가 만들어낸 fake_images를 판별자에 입력합니다. 이번엔 결과값과 '가짜' 레이블 간의 오차를 계산해야 합니다." - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - " outputs = D(fake_images)\n", - " d_loss_fake = criterion(outputs, fake_labels)\n", - " fake_score = outputs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "실제 데이터와 가짜 데이터를 가지고 낸 오차를 더해줌으로써 판별자 신경망의 전체 오차가 계산됩니다.\n", - "그 다음 과정은 역전파 알고리즘과 경사 하강법을 통하여 판별자 신경망을 학습시키는 겁니다." - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - " d_loss = d_loss_real + d_loss_fake\n", - " d_optimizer.zero_grad()\n", - " d_loss.backward()\n", - " d_optimizer.step()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "판별자를 학습시키는 코드를 모두 작성했으면 이제 생성자를 학습시킬 차례입니다. \n", - "생성자가 더 진짜같은 데이터셋을 만들어내려면, 생성자가 만들어낸 가짜 이미지를 판별자가 진짜 라고 착각하게 만들어야 합니다. \n", - "즉, 생성자의 결과물을 다시 판별자에 입력시켜, 그 결과물과 real_labels간의 오차를 최소화 시키는 식으로 학습을 진행해야 합니다." - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - " fake_images = G(z)\n", - " outputs = D(fake_images)\n", - " g_loss = criterion(outputs, real_labels)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "그리고 마찬가지로 경사 하강법과 역전파 알고리즘을 사용해서 모델의 학습을 완료합니다." 
- ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - " d_optimizer.zero_grad()\n", - " g_optimizer.zero_grad()\n", - " g_loss.backward()\n", - " g_optimizer.step()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "학습을 진행하는 동안 오차를 확인하고 생성자의 결과물을 시각화하는 코드 또한 추가시켰습니다." - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch [99/100], Step [600/600], d_loss: 1.3870, g_loss: 0.7114, D(x): 0.49, D(G(z)): 0.49\n" - ] - }, - { - "ename": "NameError", - "evalue": "name 'np' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mepoch\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0;36m10\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0;36m100\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0mfake_images\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfake_images\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnumpy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m28\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m28\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 8\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfake_images\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcmap\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'gray'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshow\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'np' is not defined" - ] - } - ], - "source": [ - " if (i+1) % 200 == 0:\n", - " print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' \n", - " .format(epoch, EPOCHS, i+1, total_step, d_loss.item(), g_loss.item(), \n", - " real_score.mean().item(), fake_score.mean().item()))\n", - " \n", - " if (epoch+1) % 10 == 0 and (i+1) % 100 == 0 :\n", - " fake_images = np.reshape(fake_images.data.numpy()[0],(28, 28))\n", - " plt.imshow(fake_images, cmap = 'gray')\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "학습이 끝난 생성자의 결과물을 한번 확인해 보겠습니다." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![generated_image0](./assets/generated_image0.png)\n", - "![generated_image1](./assets/generated_image1.png)\n", - "![generated_image2](./assets/generated_image2.png)\n", - "![generated_image3](./assets/generated_image3.png)\n", - "![generated_image4](./assets/generated_image4.png)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "전체 코드" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import torch\n", - "import torchvision\n", - "import torch.nn as nn\n", - "import torch.optim as optim\n", - "from torchvision import transforms, datasets\n", - "from torchvision.utils import save_image\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "torch.manual_seed(1) # reproducible" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Hyper Parameters\n", - "EPOCHS = 100\n", - "BATCH_SIZE = 100\n", - "USE_CDA = torch.cuda.is_available()\n", - "DEVICE = torch.device(\"cuda\" if USE_CUDA else \"cpu\")\n", - "\n", - "print(\"Using Device:\", DEVICE)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Fashion MNIST digits dataset\n", - "trainset = datasets.FashionMNIST('./.data',\n", - " train=True,\n", - " 
download=True,\n", - " transform=transforms.Compose([\n", - " transforms.ToTensor(),\n", - " transforms.Normalize((0.5,), (0.5,))\n", - " ]))\n", - "train_loader = torch.utils.data.DataLoader(\n", - " dataset = trainset,\n", - " batch_size = BATCH_SIZE,\n", - " shuffle = True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Discriminator\n", - "D = nn.Sequential(\n", - " nn.Linear(784, 256),\n", - " nn.LeakyReLU(0.2),\n", - " nn.Linear(256, 256),\n", - " nn.LeakyReLU(0.2),\n", - " nn.Linear(256, 1),\n", - " nn.Sigmoid())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Generator \n", - "G = nn.Sequential(\n", - " nn.Linear(64, 256),\n", - " nn.ReLU(),\n", - " nn.Linear(256, 256),\n", - " nn.ReLU(),\n", - " nn.Linear(256, 784),\n", - " nn.Tanh())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# Device setting\n", - "D = D.to(DEVICE)\n", - "G = G.to(DEVICE)\n", - "\n", - "# Binary cross entropy loss and optimizer\n", - "criterion = nn.BCELoss()\n", - "d_optimizer = optim.Adam(D.parameters(), lr=0.0002)\n", - "g_optimizer = optim.Adam(G.parameters(), lr=0.0002)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "total_step = len(train_loader)\n", - "for epoch in range(EPOCHS):\n", - " for i, (images, _) in enumerate(train_loader):\n", - " images = images.reshape(BATCH_SIZE, -1).to(DEVICE)\n", - " \n", - " # Create the labels which are later used as input for the BCE loss\n", - " real_labels = torch.ones(BATCH_SIZE, 1).to(DEVICE)\n", - " fake_labels = torch.zeros(BATCH_SIZE, 1).to(DEVICE)\n", - "\n", - " # Train Discriminator\n", - "\n", - " # Compute BCE_Loss using real images where BCE_Loss(x, y): - y * log(D(x)) - (1-y) * log(1 - D(x))\n", - " # Second term of the loss is always zero since 
real_labels == 1\n", - " outputs = D(images)\n", - " d_loss_real = criterion(outputs, real_labels)\n", - " real_score = outputs\n", - " \n", - " # Compute BCELoss using fake images\n", - " # First term of the loss is always zero since fake_labels == 0\n", - " z = torch.randn(BATCH_SIZE, 64).to(DEVICE)\n", - " fake_images = G(z)\n", - " outputs = D(fake_images)\n", - " d_loss_fake = criterion(outputs, fake_labels)\n", - " fake_score = outputs\n", - " \n", - " # Backprop and optimize\n", - " d_loss = d_loss_real + d_loss_fake\n", - " d_optimizer.zero_grad()\n", - " d_loss.backward()\n", - " d_optimizer.step()\n", - " \n", - " # Train Generator\n", - "\n", - " # Compute loss with fake images\n", - " z = torch.randn(BATCH_SIZE, 64).to(DEVICE)\n", - " fake_images = G(z)\n", - " outputs = D(fake_images)\n", - " \n", - " # We train G to maximize log(D(G(z)) instead of minimizing log(1-D(G(z)))\n", - " # For the reason, see the last paragraph of section 3. https://arxiv.org/pdf/1406.2661.pdf\n", - " g_loss = criterion(outputs, real_labels)\n", - " \n", - " # Backprop and optimize\n", - " d_optimizer.zero_grad()\n", - " g_optimizer.zero_grad()\n", - " g_loss.backward()\n", - " g_optimizer.step()\n", - " \n", - " if (i+1) % 200 == 0:\n", - " print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' \n", - " .format(epoch, EPOCHS, i+1, total_step, d_loss.item(), g_loss.item(), \n", - " real_score.mean().item(), fake_score.mean().item()))\n", - " if (epoch+1) % 10 == 0 and (i+1) % 100 == 0 :\n", - " fake_images = np.reshape(fake_images.data.numpy()[0],(28, 28))\n", - " plt.imshow(fake_images, cmap = 'gray')\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 참고\n", - "본 튜토리얼은 다음 자료를 참고하여 만들어졌습니다.\n", - "\n", - "* [yunjey/pytorch-tutorial](https://github.com/yunjey/pytorch-tutorial) - MIT License" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": 
"python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan_explanation.py" "b/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan_explanation.py" deleted file mode 100644 index c6f0a77..0000000 --- "a/09-\352\262\275\354\237\201\355\225\230\353\251\260_\355\225\231\354\212\265\355\225\230\353\212\224_GAN/gan_explanation.py" +++ /dev/null @@ -1,323 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -# # 9.1 GAN으로 새로운 패션아이템 생성하기 -# *GAN을 이용하여 새로운 패션 아이템을 만들어봅니다* -# GAN을 구현하기 위해 그 구조를 더 자세히 알아보겠습니다. -# GAN은 생성자(Generator)와 판별자(Discriminator) 2개의 신경망으로 이루어져 있습니다. -# 생성자는 실제 데이터와 비슷한 가짜 데이터를 만들어냅니다. 생성자가 만든 가짜 데이터는 '가짜' 라는 레이블을 부여받고 -# Fashion MNIST의 이미지와 같은 '진짜' 데이터와 함께 판별자에 입력됩니다. -# 그러면 판별자는 진짜와 가짜 데이터를 구분하는 능력을 학습합니다. 여기서 재밌는점은 판별자가 가짜와 진짜를 제대로 분류할 때 마다 생성자에 대한 페널티는 늘어난다는 것입니다. -# 그러므로 생성자는 판별자가 좋은 퍼포먼스를 내는것을 방해하기 위해 더 진짜 데이터와 비슷한 데이터를 생성하게 됩니다. -# 이처럼 GAN은 이름 그대로 판별자와 생성자의 경쟁을 통해서 학습하는 모델입니다. - -# ## GAN 구현하기 - -# 지금까지 해온 것 처럼 구현에 필요한 라이브러리들을 임포트합니다. - -import os -import torch -import torchvision -import torch.nn as nn -import torch.optim as optim -from torchvision import transforms, datasets -from torchvision.utils import save_image -import matplotlib.pyplot as plt - - -# 생성자는 랜덤한 텐서를 입력받아 기존 데이터와 비슷한 데이터를 창작하는 '신경망' 입니다. 그러므로 생성자에 입력되는 랜덤 텐서가 어떻게 설정되느냐에 따라 같은 코드라도 결과물과 퍼포먼스 근소하게 달라질 여지가 있습니다. 그러므로 여러분들이 직접 이 책의 GAN 코드를 보면서 구현한 결과와 책에서 보여주는 결과를 최대한 비슷하게 만들어주기 위해 학습 도중 생성되는 모든 랜덤한 값을 동일하게 설정해 주겠습니다. - -torch.manual_seed(1) # reproducible - - -# EPOCHS 과 BATCH_SIZE 등 학습에 필요한 하이퍼 파라미터 들을 설정해 줍니다. 
- -# Hyper Parameters -EPOCHS = 100 -BATCH_SIZE = 100 -USE_CDA = torch.cuda.is_available() -DEVICE = -1#torch.device("cuda" if USE_CUDA else "cpu") -print("Using Device:", DEVICE) - - -# 학습에 필요한 데이터셋을 로딩합니다. - -# Fashion MNIST digits dataset -trainset = datasets.FashionMNIST('./.data', - train=True, - download=True, - transform=transforms.Compose([ - transforms.ToTensor(), - transforms.Normalize((0.5,), (0.5,)) - ])) -train_loader = torch.utils.data.DataLoader( - dataset = trainset, - batch_size = BATCH_SIZE, - shuffle = True) - - -# 데이터의 로딩이 끝났으면 GAN의 생성자와 판별자를 구현합니다. -# 지금까지는 신경망 모델들을 파이썬의 객체로써 정의해 주었습니다. 그렇게 함으로써 신경망의 복잡한 기능과 동작들을 함수의 형태로 편리하게 정의해 줄 수 있었습니다. -# 그러나 이번 예제에서 구현할 생성자와 판별자는 비교적 단순한 신경망이므로, 좀 더 간소한 방법을 이용해 정의해 보겠습니다. -# Pytorch가 제공하는 Sequential 자료구조는 신경망의 forward() 동작에 필요한 동작들을 입력받아 이들을 차례대로 실행시키는 신경망 구조체를 만들어 줍니다. -# 생성자는 64차원의 랜덤한 텐서를 입력받아 이에 행렬곱(Linear)과 활성화 함수(ReLU, Tanh) 연산을 실행합니다. 생성자의 결과값은 784차원, 즉 Fashion MNIST 속의 이미지와 같은 차원의 텐서입니다. - -# Generator -G = nn.Sequential( - nn.Linear(64, 256), - nn.ReLU(), - nn.Linear(256, 256), - nn.ReLU(), - nn.Linear(256, 784), - nn.Tanh()) - - -# 판별자는 784차원의 텐서를 입력받습니다. 판별자 역시 입력된 데이터에 행렬곱과 활성화 함수를 실행시키지만, 생성자와 달리 판별자의 결과값은 입력받은 텐서가 진짜 Fashion MNIST 데이터일 확률값입니다. - -# Discriminator -D = nn.Sequential( - nn.Linear(784, 256), - nn.LeakyReLU(0.2), - nn.Linear(256, 256), - nn.LeakyReLU(0.2), - nn.Linear(256, 1), - nn.Sigmoid()) - - -# 생성자와 판별자 학습에 쓰일 오차 함수와 최적화 알고리즘도 정의해 줍니다. - - -# Device setting -# D = D.to(DEVICE) -# G = G.to(DEVICE) - -# Binary cross entropy loss and optimizer -criterion = nn.BCELoss() -d_optimizer = optim.Adam(D.parameters(), lr=0.0002) -g_optimizer = optim.Adam(G.parameters(), lr=0.0002) - - -# 모델 학습에 필요한 준비는 끝났습니다. 그럼 본격적으로 GAN을 학습시키는 loop을 만들어 보겠습니다. - -total_step = len(train_loader) -for epoch in range(EPOCHS): - for i, (images, _) in enumerate(train_loader): - images = images.reshape(BATCH_SIZE, -1)#.to(-1) - - -# 데이터셋 속의 진짜 이미지에는 '진짜' 라는 레이블을, 반대로 생성자가 만든 이미지에는 '가짜'라는 레이블링을 해 줘야 합니다. 
이 두 레이블을 나타내는 레이블 텐서를 정의해 줍니다. - -real_labels = torch.ones(BATCH_SIZE, 1)#.to(-1) -fake_labels = torch.zeros(BATCH_SIZE, 1)#.to(-1) - - -# 판별자는 실제 이미지를 보고 '진짜'라고 구분짓는 능력을 학습해야 합니다. 그러기 위해선 실제 이미지를 판별자 신경망에 입력시켜 얻어낸 결과값과 '진짜' 레이블 간의 오차값을 계산해야 합니다. - -outputs = D(images) -d_loss_real = criterion(outputs, real_labels) -real_score = outputs - - -# 다음으로는 생성자의 동작을 정의합니다. 생성자는 무작위한 텐서를 입력받아 실제 이미지와 같은 차원의 텐서를 배출해야합니다. - -z = torch.randn(BATCH_SIZE, 64)#.to(-1) -fake_images = G(z) - - -# 생성자가 만들어낸 fake_images를 판별자에 입력합니다. 이번엔 결과값과 '가짜' 레이블 간의 오차를 계산해야 합니다. - -outputs = D(fake_images) -d_loss_fake = criterion(outputs, fake_labels) -fake_score = outputs - - -# 실제 데이터와 가짜 데이터를 가지고 낸 오차를 더해줌으로써 판별자 신경망의 전체 오차가 계산됩니다. -# 그 다음 과정은 역전파 알고리즘과 경사 하강법을 통하여 판별자 신경망을 학습시키는 겁니다. - -d_loss = d_loss_real + d_loss_fake -d_optimizer.zero_grad() -d_loss.backward() -d_optimizer.step() - - -# 판별자를 학습시키는 코드를 모두 작성했으면 이제 생성자를 학습시킬 차례입니다. -# 생성자가 더 진짜같은 데이터셋을 만들어내려면, 생성자가 만들어낸 가짜 이미지를 판별자가 진짜 라고 착각하게 만들어야 합니다. -# 즉, 생성자의 결과물을 다시 판별자에 입력시켜, 그 결과물과 real_labels간의 오차를 최소화 시키는 식으로 학습을 진행해야 합니다. - -fake_images = G(z) -outputs = D(fake_images) -g_loss = criterion(outputs, real_labels) - - -# 그리고 마찬가지로 경사 하강법과 역전파 알고리즘을 사용해서 모델의 학습을 완료합니다. - -d_optimizer.zero_grad() -g_optimizer.zero_grad() -g_loss.backward() -g_optimizer.step() - - -# 학습을 진행하는 동안 오차를 확인하고 생성자의 결과물을 시각화하는 코드 또한 추가시켰습니다. - -if (i+1) % 200 == 0: - print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' - .format(epoch, EPOCHS, i+1, total_step, d_loss.item(), g_loss.item(), - real_score.mean().item(), fake_score.mean().item())) - -if (epoch+1) % 10 == 0 and (i+1) % 100 == 0 : - fake_images = np.reshape(fake_images.data.numpy()[0],(28, 28)) - plt.imshow(fake_images, cmap = 'gray') - plt.show() - - -# 학습이 끝난 생성자의 결과물을 한번 확인해 보겠습니다. 
- -# ![generated_image0](./assets/generated_image0.png) -# ![generated_image1](./assets/generated_image1.png) -# ![generated_image2](./assets/generated_image2.png) -# ![generated_image3](./assets/generated_image3.png) -# ![generated_image4](./assets/generated_image4.png) - - - - - - - - - - - - - -# 전체 코드 - -import os -import torch -import torchvision -import torch.nn as nn -import torch.optim as optim -from torchvision import transforms, datasets -from torchvision.utils import save_image -import matplotlib.pyplot as plt -import numpy as np - - -torch.manual_seed(1) # reproducible - - -# Hyper Parameters -EPOCHS = 100 -BATCH_SIZE = 100 -USE_CDA = torch.cuda.is_available() -DEVICE = torch.device("cuda" if USE_CUDA else "cpu") - -print("Using Device:", DEVICE) - - -# Fashion MNIST digits dataset -trainset = datasets.FashionMNIST('./.data', - train=True, - download=True, - transform=transforms.Compose([ - transforms.ToTensor(), - transforms.Normalize((0.5,), (0.5,)) - ])) -train_loader = torch.utils.data.DataLoader( - dataset = trainset, - batch_size = BATCH_SIZE, - shuffle = True) - - -# Discriminator -D = nn.Sequential( - nn.Linear(784, 256), - nn.LeakyReLU(0.2), - nn.Linear(256, 256), - nn.LeakyReLU(0.2), - nn.Linear(256, 1), - nn.Sigmoid()) - - -# Generator -G = nn.Sequential( - nn.Linear(64, 256), - nn.ReLU(), - nn.Linear(256, 256), - nn.ReLU(), - nn.Linear(256, 784), - nn.Tanh()) - - - -# Device setting -D = D.to(DEVICE) -G = G.to(DEVICE) - -# Binary cross entropy loss and optimizer -criterion = nn.BCELoss() -d_optimizer = optim.Adam(D.parameters(), lr=0.0002) -g_optimizer = optim.Adam(G.parameters(), lr=0.0002) - - -total_step = len(train_loader) -for epoch in range(EPOCHS): - for i, (images, _) in enumerate(train_loader): - images = images.reshape(BATCH_SIZE, -1).to(DEVICE) - - # Create the labels which are later used as input for the BCE loss - real_labels = torch.ones(BATCH_SIZE, 1).to(DEVICE) - fake_labels = torch.zeros(BATCH_SIZE, 1).to(DEVICE) - - # Train 
Discriminator - - # Compute BCE_Loss using real images where BCE_Loss(x, y): - y * log(D(x)) - (1-y) * log(1 - D(x)) - # Second term of the loss is always zero since real_labels == 1 - outputs = D(images) - d_loss_real = criterion(outputs, real_labels) - real_score = outputs - - # Compute BCELoss using fake images - # First term of the loss is always zero since fake_labels == 0 - z = torch.randn(BATCH_SIZE, 64).to(DEVICE) - fake_images = G(z) - outputs = D(fake_images) - d_loss_fake = criterion(outputs, fake_labels) - fake_score = outputs - - # Backprop and optimize - d_loss = d_loss_real + d_loss_fake - d_optimizer.zero_grad() - d_loss.backward() - d_optimizer.step() - - # Train Generator - - # Compute loss with fake images - z = torch.randn(BATCH_SIZE, 64).to(DEVICE) - fake_images = G(z) - outputs = D(fake_images) - - # We train G to maximize log(D(G(z)) instead of minimizing log(1-D(G(z))) - # For the reason, see the last paragraph of section 3. https://arxiv.org/pdf/1406.2661.pdf - g_loss = criterion(outputs, real_labels) - - # Backprop and optimize - d_optimizer.zero_grad() - g_optimizer.zero_grad() - g_loss.backward() - g_optimizer.step() - - if (i+1) % 200 == 0: - print('Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}' - .format(epoch, EPOCHS, i+1, total_step, d_loss.item(), g_loss.item(), - real_score.mean().item(), fake_score.mean().item())) - if (epoch+1) % 10 == 0 and (i+1) % 100 == 0 : - fake_images = np.reshape(fake_images.data.numpy()[0],(28, 28)) - plt.imshow(fake_images, cmap = 'gray') - plt.show() - - -# ## 참고 -# 본 튜토리얼은 다음 자료를 참고하여 만들어졌습니다. -# * [yunjey/pytorch-tutorial](https://github.com/yunjey/pytorch-tutorial) - MIT License diff --git a/README.md b/README.md index 497a9f7..b79649e 100644 --- a/README.md +++ b/README.md @@ -46,9 +46,9 @@ * [프로젝트 1] [영화 리뷰 감정 분석](07-순차적인_데이터를_처리하는_RNN/01-text-classification.ipynb) * [프로젝트 2] [Seq2Seq 기계 번역](07-순차적인_데이터를_처리하는_RNN/02-sequence-to-sequence.ipynb) * 더 보기 -8. 
[딥러닝 해킹하기 Adversarial Attack](08-딥러닝_해킹하기_Adversarial_Attack) - Adversarial Attack +8. [딥러닝을 해킹하는 Adversarial Attack](08-딥러닝을_해킹하는_Adversarial_Attack) - Adversarial Attack * [개념] Adversarial Attack 이란? - * [프로젝트 1] [FGSM 공격](08-딥러닝_해킹하기_Adversarial_Attack/01-fgsm-attack.ipynb) + * [프로젝트 1] [FGSM 공격](08-딥러닝을_해킹하는_Adversarial_Attack/01-fgsm-attack.ipynb) * 더 보기 9. [경쟁하며 학습하는 GAN](09-경쟁하며_학습하는_GAN) - GAN을 이용하여 새로운 패션 아이템을 만들어봅니다. * [개념] GAN 기초