From 48163c792cf8b1bba43a931199d178a9b5138932 Mon Sep 17 00:00:00 2001 From: D-X-Y <280835372@qq.com> Date: Fri, 11 Jun 2021 11:46:18 +0800 Subject: [PATCH] Update xmisc.scheduler/sampler --- .../{basic_test.yml => test-basic.yml} | 0 .github/workflows/test-misc.yml | 41 ++ ...el_test.yml => test-super-layer_model.yml} | 0 exps/basic/xmain.py | 10 +- .../random-search-transformer.ipynb | 0 notebooks/spaces-xmisc/scheduler.ipynb | 119 ++++ .../synthetic-data.ipynb | 0 .../synthetic-env.ipynb | 0 .../synthetic-visualize-env.ipynb | 0 .../test-transformer-encoder.ipynb | 0 notebooks/spaces/test.py | 76 --- scripts/experimental/train-vit.sh | 2 +- tests/test_misc_scheduler.py | 73 ++ xautodl/xmisc/__init__.py | 20 + xautodl/xmisc/sampler_utils.py | 32 + xautodl/xmisc/scheduler_utils.py | 632 ++++++++++++++---- xautodl/xmodels/__init__.py | 3 - 17 files changed, 807 insertions(+), 201 deletions(-) rename .github/workflows/{basic_test.yml => test-basic.yml} (100%) create mode 100644 .github/workflows/test-misc.yml rename .github/workflows/{super_model_test.yml => test-super-layer_model.yml} (100%) rename notebooks/{spaces => spaces-xmisc}/random-search-transformer.ipynb (100%) create mode 100644 notebooks/spaces-xmisc/scheduler.ipynb rename notebooks/{LFNA => spaces-xmisc}/synthetic-data.ipynb (100%) rename notebooks/{LFNA => spaces-xmisc}/synthetic-env.ipynb (100%) rename notebooks/{LFNA => spaces-xmisc}/synthetic-visualize-env.ipynb (100%) rename notebooks/{spaces => spaces-xmisc}/test-transformer-encoder.ipynb (100%) delete mode 100644 notebooks/spaces/test.py create mode 100644 tests/test_misc_scheduler.py create mode 100644 xautodl/xmisc/sampler_utils.py diff --git a/.github/workflows/basic_test.yml b/.github/workflows/test-basic.yml similarity index 100% rename from .github/workflows/basic_test.yml rename to .github/workflows/test-basic.yml diff --git a/.github/workflows/test-misc.yml b/.github/workflows/test-misc.yml new file mode 100644 index 0000000..ece0105 --- /dev/null +++ b/.github/workflows/test-misc.yml @@ -0,0 +1,41 @@ +name: Test Xmisc +on: + push: + branches: + - main + pull_request: + branches: + - main + + +jobs: + build: + strategy: + matrix: + os: [ubuntu-16.04, ubuntu-18.04, ubuntu-20.04, macos-latest] + python-version: [3.6, 3.7, 3.8, 3.9] + + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install XAutoDL from source + run: | + python setup.py install + + - name: Test Xmisc + run: | + python -m pip install pytest numpy + python -m pip install torch torchvision + python -m pip install parameterized + echo $PWD + echo "Show what we have here:" + ls + python --version + python -m pytest ./tests/test_misc* -s + shell: bash diff --git a/.github/workflows/super_model_test.yml b/.github/workflows/test-super-layer_model.yml similarity index 100% rename from .github/workflows/super_model_test.yml rename to .github/workflows/test-super-layer_model.yml diff --git a/exps/basic/xmain.py b/exps/basic/xmain.py index 9ee0a16..0274ff0 100644 --- a/exps/basic/xmain.py +++ b/exps/basic/xmain.py @@ -46,8 +46,7 @@ def main(args): train_loader = torch.utils.data.DataLoader( train_data, - batch_size=args.batch_size, - shuffle=True, + batch_sampler=xmisc.BatchSampler(train_data, args.batch_size, args.steps), num_workers=args.workers, pin_memory=True, ) @@ -57,6 +56,7 @@ def main(args): shuffle=False, num_workers=args.workers, 
pin_memory=True, + drop_last=False, ) logger.log("The training loader: {:}".format(train_loader)) @@ -73,6 +73,9 @@ def main(args): logger.log("The loss is {:}".format(loss)) model, loss = torch.nn.DataParallel(model).cuda(), loss.cuda() + scheduler = xmisc.LRMultiplier( + optimizer, xmisc.get_scheduler(args.scheduler, args.lr), args.steps + ) import pdb @@ -241,10 +244,11 @@ if __name__ == "__main__": "--valid_data_config", type=str, help="The validation dataset config path." ) parser.add_argument("--data_path", type=str, help="The path to the dataset.") - parser.add_argument("--algorithm", type=str, help="The algorithm.") # Optimization options parser.add_argument("--lr", type=float, help="The learning rate") parser.add_argument("--weight_decay", type=float, help="The weight decay") + parser.add_argument("--scheduler", type=str, help="The scheduler indicator.") + parser.add_argument("--steps", type=int, help="The total number of steps.") parser.add_argument("--batch_size", type=int, default=2, help="The batch size.") parser.add_argument("--workers", type=int, default=4, help="The number of workers") # Random Seed diff --git a/notebooks/spaces/random-search-transformer.ipynb b/notebooks/spaces-xmisc/random-search-transformer.ipynb similarity index 100% rename from notebooks/spaces/random-search-transformer.ipynb rename to notebooks/spaces-xmisc/random-search-transformer.ipynb diff --git a/notebooks/spaces-xmisc/scheduler.ipynb b/notebooks/spaces-xmisc/scheduler.ipynb new file mode 100644 index 0000000..4ecc3c3 --- /dev/null +++ b/notebooks/spaces-xmisc/scheduler.ipynb @@ -0,0 +1,119 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The config dir path: /Users/xuanyidong/Desktop/AutoDL-Projects/configs\n" + ] + } + ], + "source": [ + "#####################################################\n", + "# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.06 #\n", + "#####################################################\n", + "import os, sys, math\n", + "import numpy as np\n", + "from pathlib import Path\n", + "\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import torch\n", + "from xautodl.xmisc.scheduler_utils import CosineParamScheduler, MultiStepParamScheduler\n", + "from xautodl.xmisc.scheduler_utils import LRMultiplier, WarmupParamScheduler\n", + "\n", + "__file__ = os.path.dirname(os.path.realpath(\"__file__\"))\n", + "\n", + "config_dir = (Path(__file__).parent / \"..\" / \"configs\").resolve()\n", + "print(\"The config dir path: {:}\".format(config_dir))\n", + "\n", + "def draw(steps, lrs):\n", + " plt.close()\n", + " dpi, width, height = 200, 1400, 800\n", + " figsize = width / float(dpi), height / float(dpi)\n", + " fig = plt.figure(figsize=figsize)\n", + " ax = fig.add_subplot(111)\n", + "\n", + " plt.plot(steps, lrs)\n", + " plt.title(\"Plot Cosine Decayed LR with Warmup\")\n", + " plt.xlabel(\"steps\")\n", + " plt.ylabel(\"learning rate\")\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAbkAAAEWCAYAAAD7HukTAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAABBaUlEQVR4nO3dd3hUZdrH8e+dShIg9N577yBgryCK2Luuuq6rq7vruq762nvX1bUhuuraxY4KKhaKYqH3FkILvQbIhExmcr9/nBMdYxIGyORMuT/XNVdmzpyZ+c3JZO485zzneURVMcYYY+JRktcBjDHGmEixImeMMSZuWZEzxhgTt6zIGWOMiVtW5IwxxsQtK3LGGGPilhU5UyERmSQil3udozIicrOIvOh1jmglIqtE5Divc5QSkT0i0q6S+6Mqr4l9VuQSnPulUuh++WwSkZdFpOZ+PkcbEVERSdnHep1E5F0R2Soi+SIyT0SuE5HkA82vqverapUXYhE5SkRK3O2yR0TyRGSsiAys6tfyioi8IiL3VnCfikiB+97XicjjB/N7KqWqNVU1d1+vvy8ikuJmGxSy7AI3d9llSw42t4ldVuQMwEhVrQn0AwYCt1b1C4hIe+AnYC3QU1WzgbOAAUCtqn69KrLe3S61gMHAEmCqiBzrbaxq09t9/0cC5wCXeZznF6oaAH7AyVbqCJzfUdllU/bnucVh341xwn6R5hequg6YAPQoe5+IJInIrSKyWkQ2i8irIpLt3l36JbLT/e96SDlPfxcwTVWvU9UN7ustVdXzVXWn+xqniMhCEdnp7irtGvL6N7otit0isrS00IjInSLyunu9tEX5BxFZ47YYbynzHm4SkRUiss1tmdULY7uoquap6u3Ai8BDIc/ZRUQmish2N9fZIfdliMhj7jbLF5HvRCTDve9dEdnoLp8iIt3d5QPdFnVKyPOcISJzwnkPInKR+3rbQt/7wVDVHOB7oE9594vIpSLyScjtHBEZG3J7rYj0ca+riHQQkSuAC4Ab3M/MJyFP2cdt5eeLyDsiUqOCaFNwilipw3F+N2WXTRGRuiLyqYhsEZEd7vUWIRknich9IvI94APauVn/IiLL3c/dPSLSXkR+EJFd7rZPcx9/iYh8V2a7qIh0cK+/IiKj3c/KbhGZLCKtK3hfpgpZkTO/EJGWwAhgdjl3X+JejgbaATWBp937Sr9U6ri7o34o5/HHAe9V8tqdgLeAa4GGwHjgExFJE5HOwDXAQFWtBQwDVlXyVg4DOgPHAreHFMu/Aafi/KffDNgBPFPJ85TnA6CfiGSJSBYwEXgTaAScBzxbWrCAR4H+wFCgHnADUOLeNwHo6D5uFvAGgKpOB7YBx4e85oXAa/t6DyLSDXgOuMi9rz7QgoMkIl1wikVOBatMBg53C3BTIBU41H1s6WdlXugDVHUMznt+2P3MjAy5+2xgONAW6IXzuSvPFOBQ93UbAFnAWGBQyLIu7npJwMtAa6AVUMivn99SFwFX4LTcV7vLhuP8Dgfj/P7G4BTnljj/DJ5XQbbyXADcAzQA5rjv30SaqtolgS84xWIPsBPnD/tZIMO9bxJwuXv9a+AvIY/rDBQDKUAbQIGUSl6nGBheyf23AWNDbicB64CjgA7AZpxCmVrmcXcCr7vXS3O0CLn/Z+Bc9/pi4NiQ+5qWvody8hwF5JWzvIv7Gs1xduFNLXP/88Adbv5CnF1++/od1HGfM9u9fSPwhnu9Hk7Loum+3gNwO/B2yH1ZgB84roLXfQW4t4L7FNgFFLjX3wLSK3kPa3F2d5+LUwh+drfVpcC4Ms/boaLXdz+PF4bcfhgYXcFr1gD2Ar2B00K22Y8hy1ZW8Ng+wI6Q25OAu8vZBoeG3J4J3Bhy+zHgCff6JcB35Tw+9L2G/m5qAkGgZVX/Tdvlt5dKOwqYhHGqqn61j3Wa8et/t7jXU4DGYb7GNpwv5LCeX1VLRGQt0FxVJ4nItTgFrbuIfAFcp6rrK3iujSHXfThfKOD8F/+hiJSE3B9038O6MN9Hc5wvr53u8x0iIjtD7k/BaXU1wPkSXlH2CcTpwHEfzjHJhvzaumsA5AOvA4vF6QB0Nk4h3RDGe2iGU2wAUNUCEdkW5vsqTz83/1nAgzhFs6iCdSfz6z8kk3G2z5HAEPf2/ij7+2tW3kqquldEfsbZk9AOmOre9V3IsikAIpIJ/BunZVbXXa+WiCSratC9/cu2C7Ep5HphObebhPmefvP8qrpHRLZT5ndmqp7trjThWo/zBVuqFRDA+aMPZyqLr4Azwn1+ERGcXULrAFT1TVU9zF1HCTkuth/WAieqap2QSw11jkWG6zRglqoWuM83uczz1VTVq4CtOK2M9uU8x/nAKJyWaTZOCxRA4Jdjoz+4r3URv+6q3Nd72ICzzZwnc77Y6+/He/sddYx189xeyaqlRe5w9/pknCJ3JBUXuaqYAqX0uNzh/FrkpoYsKz1e/E+cvQ+HqGptft3FLlWUpwDILL0hIuUVv9DfTU2cVnpF/6iZKmJFzoTrLeAfItLW/QO9H3hHnV5uW3BaIxWe/4SzC2+oiDxS+gXgdkB4XUTq4BxLOUlEjhWRVJwvpSJgmoh0FpFjRCQdp3AU4rRe9tdo4L7SA/4i0lBERu3rQeJoLiJ3AJcDN7t3fQp0cjt7pLqXgSLSVVVLgJeAx0WkmYgki8gQ9z3Uct/bNpwvxvvLedlXcY4B9QQ+DPM9vAecLCKHuR0i7mbff+PJIlIj5JJWwXoPAldU8OUNTiE7GmdXdx5OoRmOU2TLO8YLzj9IlX1mwjHFfd2WwCJ32Xc4BbcPvxa5Wjifm53idNS54yBft6y5OHsZ+rgdZe4sZ50RIb+be4CfVNVacRFmRc6E6yWcFsUUYCVOsfkrgKr6cHa/fS9Oz8jBZR+sqitwdl21ARaKSD7wPjAD2K2qS3E6WDyF0woaiXNqgx9Ix/mS3YqzK6sRvxaa/fEkMA74UkR24xy7OaSS9ZuJyB6cY5bTcQrOUar6pfuedgMn4ByHWu9me8jNC3A9MN997Hb3viScArYap5W6yM1R1oe4uybdVuM+34OqLgSuxukIswGnU0rePrbJTThf/qWXb8pbSVXn4xSyf1Vw/zKc7TTVvb0LyAW+D9kdWNZ/gW7uZ+ajfeSsyDSc1vBPqu6BMNVtOP94bVbV5e56TwAZOJ+hH4HPD/D1yuW+/7tx9lgsxym0Zb2JU1y343RmuaAqM5jyifu5MMZEGRFZAfw5jOOlJsqJyCs4HZmq/BxUUzlryRkThUTkDJxjROW2rIwx4bHelcZEGRGZBHQDLnKP7RljDpDtrjTGGBO3bHelMcaYuBVzuysbNGigbdq08TqGMcaYKDJz5sytqtqw7PKYK3Jt2rRhxowZXscwxhgTRURkdXnLbXelMcaYuGVFzhhjTNyyImeMMSZuWZEzxhgTt6zIGWOMiVsRLXIiMlxElopIjojcVM79R4kzxf0c91LZVB7GGGPMfonYKQTuxJDPAMfjjIQ+XUTGqeqiMqtOVdWTI5XDGGNM4orkeXKDgB
xVzQUQkbdxJoosW+SMMcYkAFVl3c5CNuTvZUP+XjbmF5KZlsKFg1vv+8EHKJJFrjm/ndY9j/Ln7hoiInNx5uO63p0T6zdE5ArgCoBWrVpFIOrv5W7ZQ3FQ6dS4Js4k1cYYY/ZHsESZsWo7P6/czuy1O5m9Zgc7fMW/Wad3i+yYLXLlVYayo0HPAlqr6h4RGQF8BHT83YNUxwBjAAYMGFAtI0pf/uoMcrcU0K5BFsN7NGFEz6b0aJ5dHS9tjDExK1iiTFuxlQkLNvLlwo1s3eNHBDo0rMnx3RrTq0UdWtXLpGl2DZpk16BWjdSI5olkkcvDmZK+VAuc1tov3NmDS6+PF5FnRaSBqm6NYK6wbC/w07tlHWrXSOH5Kbk8O2kFvVvW4dKhbRjRsylpKdYx1RhjSuUXFjN2+lr+98Mq8nYUkpmWzNFdGjGiR1MO69iA7IzIFrOKRLLITQc6ikhbYB1wLnB+6Aoi0gTYpKoqIoNwentui2CmsPn8QYa0q89NJ3ZhR4GfT+at55Vpq7j2nTncP34xlx7alkuGtiEjLdnrqMYY45nNu/byzLc5vDszD58/yKC29fi/E7tybNdG1Ej1/vsxYkVOVQMicg3wBZAMvKSqC0XkSvf+0cCZwFUiEgAKgXM1Cia4Kw6W4A+UkOkWsLpZaVw8pA0XHtKaKcu38N/vVvLQ50t4ZdpK/n5sJ84e0IKUZGvZGWMSx669xYyZnMt/v1tJcbCEUX2ac+mhbaLusE5EZyFQ1fHA+DLLRodcfxp4OpIZDoTPHwT4pciVSkoSjurciKM6N+Lnldt5cMJibv5wPi9+l8udI7tzRKffzfJgjDFxpaREefPnNTz25VJ2+IoZ2bsZ/zy+E20aZHkdrVwxN9VOdSh0i1xWesWbZ1Dberx/1VAmLtrEAxOWcPFLP3NGvxbcelJX6malVVdUY4ypNjmb9/B/H8xj+qodDG5Xj1tGdKNni+hquZVlRa4cBf4A8PuWXFkiwgndm3BEp4Y89c1ynp+cy6Slm7lrVHdO7tWsOqIaY0zEBYIlPDdpBU99k0NGWjIPn9mLs/q3iInTq6zIlaPwl92V4W2eGqnJ/GtYF07u1Yyb3p/HNW/OZuqyrdx5SnfrmGKMiWkb8gv521uzmb5qByf1bModp3SjUa0aXscKmxW5chQUOS25rP0sUF2b1ub9q4by76+W8eykFcxas4NnLuhHp8a1IhHTGGMi6uvFm/jnu3PxB0r49zm9Oa1vC68j7TfrEliO0o4nB9IKS0lO4l/DuvDqZYPY4fNzytPf8eHsvKqOaIwxERMsUR6YsJg//m8GzbIz+PSvh8VkgQMrcuXyhdHxZF8O79iQ8X8/nN4t6vCPd+byyBdLKCnx/OwIY4yp1J6iAH9+bQbPT87l/ENa8cFfhtKuYU2vYx0wK3LlKO14knGQJzI2qlWD1/54COcObMkz367gL2/Mwuc+tzHGRJu8HT7OfG4a3y7dwt2junP/aT2j4oTug2FFrhy+0mNyB9GSK5WWksQDp/fk1pO68uWijZz9/A9s2V100M9rjDFVacG6fE59Zhrrdhby8iUDuXhIG68jVQkrcuXwFZd/MviBEhEuP7wdL/5hADmb93D28z+wbmdhlTy3McYcrJ9yt3HumB9JT0niw78MjauBLazIlcNXFCRJIL2KB2E+pktjXv/jIWzdU8SZz00jZ/OeKn1+Y4zZX98s2cTFL/1Mk+wavHfVEDo0iq/e4FbkyuHzB8lKS4nIiY4D2tTj7SsGUxws4eznf2DBuvwqfw1jjAnHx3PWccWrM+nUuBZj/zyEptkZXkeqclbkyuHzB8hMj9zB1u7Nsnn3yqFkpCZzwYs/sWj9rn0/yBhjqtC4uev5xztz6N+6Lm/+6RDqxelwhFbkylHgD4Y92smBatsgi7evGExmWjIX/vcnlm7cHdHXM8aYUuPnb+Af78xhYJt6vHLpoIhPXOolK3LlKPQHqqzTSWVa1svkrT8NJjVZuODFH8nZbIXOGBNZXy7cyN/emk3flnV46ZKBcT/0oBW5chQUBaulyAG0aZDFm38aDAjnvfATq7cVVMvrGmMSz+RlW7j6zVn0aJ7Ny5cOrJLTpKKdFbly+PyBiO+uDNW+YU3e+tMhFAdL+MNLP7N1j51HZ4ypWnPX7uSq12fSsVEt/ndZfO+iDGVFrhw+f5CsCHY8KU/HxrV46ZKBbNy1l0tfns6eIhsZxRhTNXK37OHSV6ZTv2Yar1w2kOyMxChwYEWuXD5/kIzU6m/G92tVl2fO78eiDbu46vWZ+AMl1Z7BGBNfNu/ay8Uv/QzAq5cdElPT5FQFK3Ll8PkD1d6SK3Vs18Y8cHpPpi7fyo3vz0PVBnU2xhyYgqIAl7w8ne0Ffl6+ZCBtG2R5Hanaxf9RxwNQHacQVObsAS3ZmL+Xxycuo33DLK45pqNnWYwxsSlYovz97Tks2biL/14ykN4t63gdyRNW5MoIBEvwB0qqrXdlRf56TAdyt+zh0S+X0a5hTUb0bOppHmNMbHn48yV8tXgTd47sxtGdG3kdxzO2u7KMqh6c+UCJCA+e0Yt+repw3dg5zM+z4b+MMeEZO2Mtz0/J5cLBrfjD0DZex/GUFbkyfEWlRc77Rm6N1GSev2gA9bPSufzV6WzatdfrSMaYKPdT7jZu+XA+h3VowB0ju0dkDN5YYkWujNIJU73qeFJWw1rpvPiHAezeG7Ael8aYSm3IL+TqN2fRsm4mz5zfj9Rk+4q3LVBGoT96WnKlujatzcNn9mLWmp3c8+kir+MYY6JQUSDIVa/PotAf5PmL+pOdmTjnwlUmer7Jo0SBexK218fkyjq5VzPm5eUzZkouvVpkc9aAll5HMsZEkbs/WcSctTt59oJ+dGwcX3PCHQxryZURLR1PynPDsM4MaVefWz5aYPPQGWN+MXb6Wt74aQ1/PrKd9cQuw4pcGaUdT6Jx4NKU5CSePr8vDbLSuPL1meQXFnsdyRjjsYXr87n14wUc2qE+/zqhs9dxoo4VuTJKO55kpEZfSw6gfs10nr6gHxvz93KTjYhiTEIrKArw1zdnUycjlf+c25cU62jyO7ZFyijteBKNLblS/VrV5fphnZmwYCNv/LTG6zjGGI/c/vFCVm4r4Ilz+1C/ZrrXcaKSFbkySlty0XhMLtQVh7fjiE4NufvTRSzesMvrOMaYavbBrDzen5XHX4/pyND2DbyOE7UiWuREZLiILBWRHBG5qZL1BopIUETOjGSecPiKgiQJpKdEd/1PShIeP7s32RmpXPPmLHx+m5rHmESRu2UPt360gEFt6vG3Yzp4HSeqReybXESSgWeAE4FuwHki0q2C9R4CvohUlv3h8wfJSkuJiVECGtRM54lz+pC7tYA7Pl7odRxjTDXYWxzkmjdnk5aSxJPn9bHjcPsQya0zCMhR1VxV9QNvA6PKWe+vwPvA5ghmCZvPHyAjyndVhjq0QwOuPqoD787M46PZ67yOY4yJsAcnLGHRhl08emZvmmZneB0n6kWyyDUH1obcznOX/UJEmgOnAaMreyIRuUJEZ
ojIjC1btlR50FDOrODR2+mkPNce15EBretyy4fzWb2twOs4xpgImbhoE69MW8Vlh7bluG6NvY4TEyJZ5Mrb31e2v/sTwI2qGqzsiVR1jKoOUNUBDRs2rKp85fL5A1Hf6aSslOQknjyvL0lJwj/HziVYYqcVGBNvtu4p4qb359GtaW1uPNHOhwtXJItcHhA69lQLYH2ZdQYAb4vIKuBM4FkROTWCmfapoCgYc0UOoHmdDO4e1Z0Zq3fwwtRcr+MYY6qQqvJ/H8xn994A/z6nD+kpsfcd5ZVIFrnpQEcRaSsiacC5wLjQFVS1raq2UdU2wHvAX1T1owhm2idfsbezgh+MU/s058QeTXj8y2V2WoExceT9WeuYuGgT1w/rROcmNi7l/ohYkVPVAHANTq/JxcBYVV0oIleKyJWRet2D5SuKvd2VpUSEe0/tQe2MVP7xzhyKApXuBTbGxIC8HT7uGreQQW3q8cfD2nkdJ+ZEtO+pqo5X1U6q2l5V73OXjVbV33U0UdVLVPW9SOYJh88fuy05cIb9evD0nizZuJsnv1rudRxjzEEoKVH+9e48SlR59KzeJCdF/6lN0cZOsCjD5w9EzYSpB+q4bo05Z0BLRk9ewczV272OY4w5QK9MW8UPudu47eRutKqf6XWcmGRFrowCfzCmzpOryK0nd6VZnQyuGzv3lznyjDGxI2fzbh76fAnHdGnEOQNt/sgDZUUuRCBYgj9QQlYM764sVatGKo+e1Zs12308OGGJ13GMMfshECzhn2PnkpmWzINn9IyJEZiilRW5ENE8YeqBGNyuPpcObctrP67m55W229KYWPHS9yuZm5fPXaN60KhWDa/jxDQrciFKJ0yN5Y4nZV0/rBMt62Vw4/vz2FtsvS2NiXYrtxbw2JfLOK5rY0b2slm+D5YVuRClI/nHeseTUJlpKTx4ei9Wbi3gCettaUxUKylRbnp/HmkpSdx3Wg/bTVkFrMiF8LkTpkbrrOAH6tAODThnQEtemJrL/Lx8r+MYYyrw1vQ1/LRyO7eM6Erj2rabsipYkQtR2gsx1gZoDsfNJ3WlflYa/3pvLsXBEq/jGGPKWL+zkAfGL2Fo+/rWm7IKWZELEW8dT0JlZ6Ry76k9WLJxN6MnrfA6jjEmhKpy60cLCJSU8ODpvWw3ZRWyIhciHjuehDqhexNO6tWUp77JYfmm3V7HMca4Pp6znm+WbOb6EzrbSd9VzIpciNKOJ/HYkit11yndyUxP5ob359mUPMZEgW17irjrk4X0aVmHSw9t63WcuGNFLkRpx5N4PCZXqkHNdG4/uRuz1+zkzZ/XeB3HmIR33/jF7N4b4KEzetnYlBFgRS5EQQK05ABO69ucoe3r8/DnS9i8e6/XcYxJWNNWbOWDWev485HtbAqdCLEiF6LQHyRJID0lvjdL6ZQ8RcUl3PPpYq/jGJOQigJBbv1wAa3qZfLXYzp6HSduxfe3+X5yZgVPSYieTe0a1uQvR7fnk7nrmbxsi9dxjEk4z01aQe7WAu45tQc14uzc3GhiRS6Ezx+7E6YeiKuOak+7Blnc9tECG/LLmGqUu2UPz367gpG9m3Fkp4Zex4lrVuRC+PzBuO50UlZ6SjL3ntaDNdt9PPWNDfllTHUoPScuPTWJ207u6nWcuGdFLoTPH4i7Ib32ZWj7BpzRrwVjpuTauXPGVIMPZ69j2opt3Di8i80wUA2syIVwWnKJVeQAbjmpK1npKdz84XxK7Nw5YyJmp8/PfZ8tpm+rOpw/qJXXcRKCFbkQBf5g3I52Upl6WWncfGJXpq/awXuz8ryOY0zceujzpewsLOa+U3uSZOfEVQsrciF8RYnV8STUmf1b0L91XR6asIR8X7HXcYyJO3PX7uTt6Wu4ZGgbujWr7XWchGFFLoQvQVtyAElJwt2jurPD5+fxiUu9jmNMXCkpUW7/eAENaqZz7XF2Tlx1siIXwucPJOQxuVLdm2Vz0eDWvPbjahaut3nnjKkq78xYy9y8fG4Z0ZVaNVK9jpNQ9lnkRKSTiHwtIgvc271E5NbIR6t+Bf4gGQm6u7LUdSd0pm5mGrd/vNA6oRhTBXb6/Dz8+RIGta3HqD7NvI6TcMJpyb0A/B9QDKCq84BzIxnKC4FgCf5ACVkJuruyVHZGKjee2IWZq3fwwex1XscxJuY98sVSdu0NcPeo7gkxmlK0CafIZarqz2WWBSIRxkvxPGHq/jqzXwv6tqrDgxMWk19onVCMOVDz8pzZPv4wpA1dmlhnEy+EU+S2ikh7QAFE5ExgQ0RTeaDQH98Tpu6PpCThnlE92Fbg598Tl3kdx5iY5HQ2WUj9rHSuPd46m3glnCJ3NfA80EVE1gHXAldGMpQXCoqcxmkidzwJ1aN5Nhce0ppXf1jFovW7vI5jTMx5d+Za5qzdyc0julDbOpt4Jpwip6p6HNAQ6KKqh4X5uJhSOmFqog3rVZnrT+hMncw07hi3AFXrhGJMuHb6/Dz0+VIGtqnLaX2bex0noYVTrN4HUNUCVS0d3PC9yEXyRiLMCr6/sjNTuXF4Z6av2sGH1gnFmLA99uUy8guLuXtUD+ts4rEKi5yIdBGRM4BsETk95HIJENaooiIyXESWikiOiNxUzv2jRGSeiMwRkRkictgBv5ODlCizgu+vs/q3pE/LOtw/fgm79lonFGP2ZcG6fF7/aTUXDW5N16bW2cRrlbXkOgMnA3WAkSGXfsCf9vXEIpIMPAOcCHQDzhORbmVW+xrorap9gMuAF/cvftXxFVnHk/L82gmliCe/sul4jKmMqnLHuIXUz0rjH8d38jqOASr8RlfVj4GPRWSIqv5wAM89CMhR1VwAEXkbGAUsCnmNPSHrZ+H24PSCz1pyFerZIptzB7bkf9NWcd6gVnRoVNPrSMZEpXFz1zNz9Q4ePrMX2RnW2SQahHNMbraIXC0iz4rIS6WXMB7XHFgbcjvPXfYbInKaiCwBPsNpzf2OiFzh7s6csWXLljBeev/5/HaeXGX+eUJnMtKSuefTRdYJxZhy+PwBHhi/hJ7NszmzXwuv4xhXOEXuNaAJMAyYDLQAwplds7yjrb/7dlTVD1W1C3AqcE95T6SqY1R1gKoOaNgwMlPFW8eTyjWomc7fj+3I5GVb+HbpZq/jGBN1Rk9awcZde7nzlG42jU4UCafIdVDV24ACVf0fcBLQM4zH5QEtQ263ANZXtLKqTgHai0iDMJ67yvn8AZIE0lPi7uyIKnPxkDa0a5jFPZ8uxh8o8TqOMVFj7XYfz0/JZVSfZvRvXc/rOCZEON/opV3qdopIDyAbaBPG46YDHUWkrYik4Yx3OS50BRHpIG7/WhHpB6QB28LMXqUKipxpdqy7b8XSUpK47eRurNxawCvTVnodx5io8eCEJSSJcNOJXbyOYsoIp8iNEZG6wK04RWoR8NC+HqSqAeAa4AtgMTBWVReKyJUiUjpiyhnAAhGZg9MT8xz16IBPYXHiTpi6P47u3IhjujTiP1/nsHn3Xq/jGOO5H3O38dn8DVx1VHuaZmd4HceUUWmRE5EkYJeq7lDVKaraTlUb
qerz4Ty5qo5X1U6q2l5V73OXjVbV0e71h1S1u6r2UdUhqvrdQb+jA1RQFLTjcWG69aSuFAWCPPqFTa5qEluwRLnrk0U0r5PBFUe08zqOKUelRU5VS3BaY3HP5w/YkF5hatewJpce2pZ3Z+YxL2+n13GM8czb09eweMMubh7RlRr2/RGVwtldOVFErheRliJSr/QS8WTVzOcP2uDM++GaYzpQPyuNO8cttFMKTELK9xXz6BdLOaRtPUb0bOJ1HFOBcIrcZTgzEUwBZrqXGZEM5QVnVnDbXRmu2jVSuWFYF2at2cnHcyrsNGtM3Hry6+XkFxZz+8hu1mEtiu2zyKlq23IucbfzudAfIMs6nuyXM/u3oGfzbB6YsPiXqYqMSQQ5m3fz6g+rOHdQK7o3y/Y6jqmEnRTmKj2FwIQvKUm485RubNpVxHOTVngdx5hqoarc/eliMtKS+aeNTxn1rMi5fH47heBA9G9dj1F9mjFmai5rt/u8jmNMxH2zZDNTlm3h2uM6Ub9mutdxzD5YkXP5/EEyrePJAbnpxC4ki3DfZ4u9jmJMRPkDJdzz6SLaN8zi4iGtvY5jwrDP/XPuSCRl5QOr3RO+Y14gWEJRoIQs2115QJpmZ/CXo9rz2MRlTMvZytAOnozMZkzEvTJtJau2+Xjl0oGkJlsbIRaE81t6FvgRGAO8APwAvA0sE5ETIpit2viKbQaCg/WnI9rRom4Gd32yiEDQxrU08WfL7iL+83UOx3RpxFGdG3kdx4QpnCK3CujrzgLQH+gLLACOAx6OYLZqU+i3CVMPVo3UZG4Z0ZWlm3bz1s9rvI5jTJV79Iul7C0OcutJXb2OYvZDOEWui6ouLL2hqotwil5u5GJVr9Lu79aSOzjDezRhcLt6PDZxGTsK/F7HMabKzM/LZ+zMtVx6aBvaNbRJg2NJOEVuqYg8JyJHupdncXZVpvPrDAUxzSZMrRoiwh0ju7OrsJgnvlrmdRxjqoSqctcnC6mflcZfj+3odRyzn8IpcpcAOcC1wD+AXHdZMXB0hHJVK5swtep0bVqb8w9pxes/rWHpxnDm1jUmuo2bu54Zq3fwr2GdqV0j1es4Zj+FM+JJoao+pqqnqeqpqvqoqvpUtURV91RHyEgr8Du7KzOsJVclrju+M1lpydz9qY1raWKbzx/gwQlL6NG8Nmf2b7nvB5ios88iJyKHishEEVkmIrmll+oIV11KO57YKQRVo15WGv84vhPf52xj4qJNXscx5oCNnpzLhvy93DGyO8lJNj5lLApnd+V/gceBw4CBIZe4YR1Pqt6Fg1vTsVFN7v1sMUWBoNdxjNlveTt8PD95BSN7N2Ngm7ibeCVhhFPk8lV1gqpuVtVtpZeIJ6tG1vGk6qUmJ3H7yG6s2e7jpe9WeR3HmP32wIQliDgj+pjYFU6R+1ZEHhGRISLSr/QS8WTVyDqeRMbhHRtyXNfGPP3Ncjbv2ut1HGPC9mPuNj6bt4Erj2xP8zoZXscxByGcIncIMAC4H3jMvTwayVDVzecPIALpKTZMT1W79aSu+IMlPPzFUq+jGBOWYIly1yeLaF4ngz8f0d7rOOYg7bPpoqpxcZpAZXz+IFlpKTbxYQS0aZDFZYe15fnJuVw0uDW9W9bxOpIxlXp7+hoWb9jF0+f3tR7XcaDCIiciF6rq6yJyXXn3q+rjkYtVvWyancj66zEd+WDWOu78ZCEfXDXU/pkwUSvfV8yjXyxlUNt6nNSzqddxTBWobP9clvuzVgWXuOFMmGpFLlJqpqdww7DOzF6zk4/mrPM6jjEVeuLrZewsLOaOkd3sn7E4UWFLTlWfd3/eVX1xvOHz26zgkXZGvxa89uNqHpywhBO6NbFOPibqLN+0m1d/WM25A1vRvVm213FMFQnnZPCGInKziIwRkZdKL9URrrr4/AGybMLUiEpKcsa13LSriOcmrfA6jjG/oarc/ekiMtOSuf6ETl7HMVUonO6EHwPZwFfAZyGXuFHgD5JhLbmI69+6Lqf2acaYqbms3e7zOo4xv/h68WamLt/Ktcd1on7NdK/jmCoUTpHLVNUbVXWsqr5feol4smpU6A+QZcfkqsWNJ3YhWYT7xy/2OooxABQFgtz72SLaN8zi4iGtvY5jqlg4Re5TERkR8SQeKigKWlfhatI0O4Orj27PhAUbmbZiq9dxjOHl71exapuP20d2JzXZzpWNN+H8Rv+OU+gKRWSXiOwWkV2RDladCouDNjhzNbr88Ha0qJvB3Z8sIhAs8TqOSWCbd+/lqa+Xc1zXRhzZqaHXcUwEVFrkRCQJGK6qSaqaoaq1VbWWqtaupnzVoqAoQKZ1PKk2NVKTuWVEV5Zs3M3b09d6HccksIc/X4o/WMItJ3XzOoqJkEqLnKqWEGdDeJUVLFGKAiVkplpLrjoN79GEwe3q8diXS8n3xcUE8ybGzF27k/dm5nHZYW1p2yBr3w8wMSmc3ZVfisgZEqdnRvrcCVPtFILqJeKcUpBfWMwTXy/zOo5JMCUlyp2fLKRBzXSuObqD13FMBIVT5K4D3gWK9veYnIgMF5GlIpIjIjeVc/8FIjLPvUwTkd77mf+g/TrNjrXkqlvXprU5/5BWvPrDapZv2u11HJNAPp67jtlrdnLj8M7UqpHqdRwTQfsscu4xuCRVTdufY3Iikgw8A5wIdAPOE5GyO75XAkeqai/gHmDM/r+Fg2MTpnrruuM7k5WWzN2fLkJVvY5jEkBBUYAHJyyhd4tszujXwus4JsLC6i8rInVFZJCIHFF6CeNhg4AcVc1VVT/wNjAqdAVVnaaqO9ybPwLV/omzCVO9VS8rjX8c34mpy7fy9eLNXscxCeDZSTls2lXE7SO7k5QUl0dhTIhwhvW6HJgCfAHc5f68M4znbg6Edp3Lc5dV5I/AhAoyXCEiM0RkxpYtW8J46fDZ7krvXTi4NR0a1eTezxZRFAh6HcfEsTXbfLwwdSWn9W1O/9Z1vY5jqkG458kNBFa7c8v1BcKpNOX9i1Tu/igRORqnyN1Y3v2qOkZVB6jqgIYNq/ZcltKOJ3YKgXdSk5O4/eRurNrm46XvVnkdx8Sxez5bREqScOPwLl5HMdUknCK3V1X3AohIuqouATqH8bg8oGXI7RbA+rIriUgv4EVglKpuC+N5q1RpS85OBvfWEZ0ackK3xjz1zXI25Bd6HcfEoW+Xbmbiok1cc0wHmmTX8DqOqSbhFLk8EakDfARMFJGPKadYlWM60FFE2opIGnAuMC50BRFpBXwAXKSqnvQjt44n0eO2k7sRLFHu/czGtTRVqygQ5K5xC2nXIIvLD2vndRxTjfbZfFHV09yrd4rItzgzEnwexuMCInINzjG8ZOAlVV0oIle6948GbgfqA8+6p+EFVHXAAb2TA1RYbB1PokXLeplcfXQHHp+4jPMHbeXQDg28jmTixAtTclm1zcerlw0iLcXGp0wk4fauPExELlXVycAPVN6B5BeqOl5VO6lqe1W9z1022i1wqOrlqlpXVfu4l2otcOAMzgzYJJ5R4ooj2tG6fia3f7wAf8DGtTQHL2+Hj6e
/zeHEHk04wsanTDjh9K68A6dDyP+5i1KB1yMZqjr5/AFEIN3+u4sKNVKTuWNkN1ZsKeDl71d6HcfEgXs+XYQg3HqyjU+ZiML5Zj8NOAUoAFDV9UCtSIaqTj6/MwNBnI5aFpOO6dKY47o25smvrROKOTiTl23hi4VOZ5PmdTK8jmM8EE6R86szFIUCiEhcjWTq8wdsLrkodMdIpxPKfdYJxRygokCQO8ctpG2DLC4/vK3XcYxHwilyY0XkeaCOiPwJ+Ap4IbKxqo/TkrMiF21a1svkqqPa8+m8DTa5qjkgL05dycqtBdx5SnfSU+xvPFGFM3blo8B7wPs458fdrqpPRTpYdSkoCtpoJ1HqyiPb07JeBrd/vJBim1zV7Id1Owt5+pschnVvbJOhJriweluo6kRV/ZeqXq+qEyMdqjr5/AE7fSBK1UhN5o6Tu5OzeY91QjH75d5PF6Eot1lnk4RXYZErnVKnnEvYU+3EAp8/SKadPhC1juvWmGO6NOLJr5azadder+OYGDB1+RYmLNjI1Ud1oEXdTK/jGI9VWORKp9Qp5xLWVDuxwucP2DG5KHfHyG4Ulyh3f7rI6ygmyu0tDnLHxwtpXT+TPx1hI5uYMHdXxrOCoqD1roxyretncfVRHfhs3gYmLbXpeEzFnpu0gtytBdwzqgc1Uu3v2liRo7A4aIMzx4Arj2pHu4ZZ3PbxAgr9Nh2P+b0VW/bw3KQVnNK7mY1sYn6R8EWuoChg0+zEgPSUZO47tSdrtxfy1DfLvY5jooyqcttHC0hPTeLWk7t6HcdEkYQucsESpShQQmaqteRiwZD29TmjXwvGTMll2abdXscxUeTD2euYtmIbNw7vQqNaNo2O+VVCF7nSCVOzrCUXM245qSu1aqRw8wfzKSkpdw5ek2B2FPi597PF9G1Vh/MHtfI6jokyCV7knGM71vEkdtTLSuP/RnRlxuodjJ2x1us4Jgo8OGEJ+YXF3H9aT5KSbAxa81tW5LBZwWPNWf1bMKhtPR6YsISte4q8jmM89PPK7bwzYy2XH9aWrk3j5swmU4USusjZrOCxSUS4/7Qe+PwBG8A5gfkDJdzy4Xya18ng78d19DqOiVIJXeRKW3I2dmXs6dCoFlcd2Z4PZ6+zc+cS1LOTcli+eQ/3nNrd/oZNhRK8yLktOet4EpOuPqYDHRrV5JYPF7DHbZWbxLB0426e+TaHUX2acUyXxl7HMVEswYucHZOLZekpyTx0Ri/W5xfy8OdLvI5jqkmwRLnhvbnUqpHKHSO7ex3HRDkrctgxuVjWv3VdLhnahld/WM3PK7d7HcdUg5e+W8ncvHzuPKU79bLSvI5jolyCFznreBIPrj+hMy3qZnDj+/PYW2xDfsWzVVsLeGziUo7r2piRvZp6HcfEgIQucgVF1vEkHmSlp/Dg6b1YubWAJ76yIb/iVUmJctMH80hNSuLeU3sgYufEmX1L6CJX6A8gAjVSE3ozxIXDOjbg7AEteGFqLvPz8r2OYyLg7elr+TF3O7ec1JUm2TZ0lwlPQn+7F/idGQjsP8L4cMtJ3aiflcYN78+jOFjidRxThTbkF/LA+MUMbV+fcwa29DqOiSEJXeR8/oAN6RVHsjNSuefUHizesIunv8nxOo6pIqrKje/PJ1CiPHh6L/un1OyXBC9yQZsVPM4M696E0/o25+lvc5i7dqfXcUwVeOOnNUxZtoWbR3ShVf1Mr+OYGJPQRa6gKGidTuLQnad0p1GtdP4xdo71toxxq7YWcN9nizm8YwMuHNza6zgmBiV0kSssDtjpA3EoOyOVR87sTe6WAh6yk8RjVrBEuW7sHFKThYfPtN2U5sAkdJErKAqSmW4tuXh0WMcGXDK0DS9/v4ppOVu9jmMOwPNTVjBrzU7uObUHTbMzvI5jYlRCFzmfP0BmqrXk4tWNw7vQrkEW1787l117i72OY/bDovW7+PfEZZzUsymn9G7mdRwTwyJa5ERkuIgsFZEcEbmpnPu7iMgPIlIkItdHMkt5fP6gDc4cxzLSknn8nD5s2l3EXeMWeR3HhKkoEOS6sXOok5nGPXbStzlIEStyIpIMPAOcCHQDzhORbmVW2w78DXg0Ujkq43PPkzPxq0/LOlx9VHven5XH5ws2eh3HhOHxictYsnE3D53R08amNActki25QUCOquaqqh94GxgVuoKqblbV6YAn+5IKiqzjSSL467Ed6dk8m5s+mMf6nYVexzGV+G75VsZMyeW8QS1tCh1TJSJZ5JoDa0Nu57nLokKwRCkKlNgpBAkgNTmJ/5zXl+JACX9/ezYBGw0lKm3ZXcS178yhQ8Oa3H6yTaFjqkYki1x5O9L1gJ5I5AoRmSEiM7Zs2XKQsRylMxBk2TG5hNC2QRb3n96T6at28OTXNohztClxTxfYvbeYp8/vZyMRmSoTySKXB4QOMtcCWH8gT6SqY1R1gKoOaNiwYZWEK3TnkrM/psQxqk9zzurfgqe/zeF7O60gqoyesoKpy7dyx8judG5Sy+s4Jo5EsshNBzqKSFsRSQPOBcZF8PX2S4HNCp6Q7hrVnXYNsrj2nTls3VPkdRwDzFy9g8e+XMZJvZpy3iAbfNlUrYgVOVUNANcAXwCLgbGqulBErhSRKwFEpImI5AHXAbeKSJ6I1I5UplAFRc7uSmvJJZbMtBSePr8f+YXFXDd2LiUlB7QH3VSRfF8xf3trNs3q1OCB03va6QKmykX0PDlVHa+qnVS1vare5y4braqj3esbVbWFqtZW1Tru9V2RzFSqsNhacomqa9Pa3H5yN6Ys28KYqblex0lYzuwC89i0ay9PndeP2jVSvY5k4lDCjnhS2pKzk8ET0wWHtGJEzyY8+sVSfsrd5nWchPTS96v4fOFGbhjemT4t63gdx8SphC1yPveYnJ0nl5hEhAfP6EWreplc/eZsNubv9TpSQvkxdxv3j1/MsO6N+dPh7byOY+JYwhc5212ZuGrXSOX5i/rj8we46o2Z+AN2/lx12JBfyDVvzqJ1/UwePau3HYczEZXARc7dXWktuYTWsXEtHjmzN7PX7OSeT218y0grCgS56vVZFPqDjLmoP7XsOJyJsAQucqW7K60ll+hO6tWUPx/Rjtd+XM3Y6Wv3/QBzQFSVO8ctZM7anTx2dm86NLLz4UzkJW6RKwogAjVSE3YTmBD/GtaZwzo04JaP5ltHlAh5+ftVvPXzWv5yVHuG92jqdRyTIBL2G77AHyQzNdmOBxgAUpKTeOb8frSsm8mVr89k9bYCryPFlW+XbubezxYxrHtjrj+hs9dxTAJJ2CLnzCVnuyrNr7IzU/nvJQMpUfjj/2bYRKtVZOnG3fz1zdl0bVqbf5/Th6Qk+8fSVJ8ELnIBsqzTiSmjbYMsRl/Yn1VbC7j6jVkU24wFB2XrniL++L/pZKQl8+IfBtgxcFPtErbIFRQFybA/OFOOIe3rc99pPZi6fCs3fzAfVRv660AUFAW47JXpbN1TxAsXD6BpdobXkUwCSthv+cJia8mZip0zsBXrd+7lya+X07BWOjcM7+J1pJjiD5Rw5eszWbh+F2
Mu6m8jmhjPJGyRKygKUjvDztExFbv2uI5s3l3Es5NW0KhWOpcc2tbrSDGhpES54b25TF2+lYfP7MWxXW2Gb+OdhC1yhf4gTWrX8DqGiWIiwr2n9mDbniLu+nQR9Wqmc0rvZl7Himqqyv3jF/PRnPX8a1hnzh5gU+cYbyXuMTl/wAZnNvuUnCT857y+DGxdj+vemcMXCzd6HSmqPT5xGS9+t5JLhrbhL0e19zqOMYlb5Hz+oA3pZcJSIzWZly4dSM8W2Vzz5iy+WbLJ60hR6T9fL+epb3I4d2BLbj+5m52DaqJCAhe5gA3ObMJWMz2FVy4dRNemtbnytVlMXrbF60hR5dlJOTw+cRln9GvB/af1tHPhTNRIyCIXLFH2FpfYOTtmv2RnpPLqZYPo0KgmV7w6g2+XbvY6UlR4dlIOD3++lFF9mvHwmb2swJmokpBFzmYgMAeqTmYar19+CB0a1eRP/5vBJ3PXex3JM6rKAxMW8/DnSzmldzMeO6s3yVbgTJRJyCJXWDoDgXU8MQegXlYab10xmH6t6vK3t2fz5k9rvI5U7YIlys0fzuf5yblcOLgVT5zTh5TkhPw6MVEuIT+VBTZhqjlItWuk8r/LBnFUp4bc/OF8nvk2J2FGRtlbHORvb8/mrZ/XcvXR7blnVA/bRWmiVkIWudLdlRm2u9IchIy0ZMZcPIBT+zTjkS+W8q/35lEUCHodK6K27C7ivBd+5LN5G7h5RBf+NayL9aI0US0hmzI+a8mZKpKanMS/z+lD6/pZPPn1ctZs8zH6ov7Uy0rzOlqVW7xhF398ZTrbfX6eu6AfJ/a0OeFM9EvIllxBkbXkTNUREf5xfCf+c15f5ubtZNQz37Fo/S6vY1Wpzxds5IznphFU5b0rh1qBMzEjIYtcaceTLOt4YqrQKb2b8fYVgykqLuHUZ7/n9R9Xx/xxuqJAkDvHLeTK12fSsVFNxl1zGD2aZ3sdy5iwJWSRs44nJlL6tqrL+L8fzuB29bn1owVc8+bsmJ18ddXWAs54bhqvTFvFZYe2ZeyVQ2hs472aGJOQ3/LW8cREUoOa6bxyyUDGTM3lkS+WMmftTh44vSdHdGrodbSwlJQob/y8hgfHLyYlOYkXLh7A8d1sJgETmxKyJWcdT0ykJSUJVx7ZnrF/HkJ6ahIXv/Qz142dw44Cv9fRKpWzeQ9nP/8Dt3204JdWqRU4E8sS8lveVxRABGqkJmSNN9Wof+u6jP/b4TzzbQ7PTVrB5KVbuGF4Z87o1yKqTp7eUxRgzOQVjJ6cS0ZaMo+e1Zsz+jW30wNMzEvMIucPkpmabH/AplrUSE3mnyd05qReTbn5g/nc+P58Xpi6khuGdeb4bo09/Rz6AyW8+dNqnvomh20Ffk7p3YzbTu5Gw1rpnmUypiolZJEr8AfJTE/It2481KVJbd6/aihfLNzIw58v5YrXZtK3VR2uOLwdx3drXK0tu4KiAO/PyuOFqbms3V7IkHb1ufHELvRpWafaMhhTHRLym97nD9jgzMYTIsLwHk05rmtj3p2Zx9Pf5HDVG7NoXieDi4a05uwBLSN6IvmqrQW89uNqxk5fy+6iAL1b1uHeU3tyRMcGtmfDxKWIFjkRGQ48CSQDL6rqg2XuF/f+EYAPuERVZ0UyE5ROmJqQ9d1EiZTkJM4b1IqzB7Tkq8WbePn7lTw4YQmPfLGUwe3qMbxHU4Z1b0yjWgfXZV9VWbGlgM8XbGD8/I0s2rCLlCRhRM+mXHJoG/q1qltF78iY6BSxb3oRSQaeAY4H8oDpIjJOVReFrHYi0NG9HAI85/6MKGfCVGvJGe8lJwnDujdhWPcmLNm4i0/mrmfC/I3c9tECbvtoAe0aZNGnVR36tqpL1ya1aFong0a10kktZ9fm3uIgG/P3sn5nIfPX5TN7zU5mr93Bpl1FAPRtVYdbRnRlZO9mNMm2891MYohkc2YQkKOquQAi8jYwCggtcqOAV9UZFuJHEakjIk1VdUMEc1FQFKRWDWvJmejSpUltujSpzfUndGbZpj18tXgTs9fsZMqyLXwwa90v64lA/ax00pJ/3b3oKw6y0/fbk85b1ctkcLv69G9dl+O7NaZpdka1vRdjokUkv+mbA2tDbufx+1Zaees0B35T5ETkCuAKgFatWh10sMHt6lM7w4qciU4iQucmtejcpBbg7HLM21HIii172Ji/lw35e9m8ey+B4K9DhqWnJtE0O4MmtWvQNLsGnZvUon5N6yFpTCS/6cs7il12IL9w1kFVxwBjAAYMGHDQgwHedGKXg30KY6qNiNCyXiYt62V6HcWYmBPJPst5QMuQ2y2A9QewjjHGGHNAIlnkpgMdRaStiKQB5wLjyqwzDrhYHIOB/EgfjzPGGJM4Ira7UlUDInIN8AXOKQQvqepCEbnSvX80MB7n9IEcnFMILo1UHmOMMYknor0vVHU8TiELXTY65LoCV0cygzHGmMQVPSPEGmOMMVXMipwxxpi4ZUXOGGNM3LIiZ4wxJm6J0/cjdojIFmB1FTxVA2BrFTxPdYq1zLGWF2Ivc6zlBctcHWItLxx85taq2rDswpgrclVFRGao6gCvc+yPWMsca3kh9jLHWl6wzNUh1vJC5DLb7kpjjDFxy4qcMcaYuJXIRW6M1wEOQKxljrW8EHuZYy0vWObqEGt5IUKZE/aYnDHGmPiXyC05Y4wxcc6KnDHGmLiVcEVORIaLyFIRyRGRm7zOUx4RaSki34rIYhFZKCJ/d5ffKSLrRGSOexnhddZQIrJKROa72Wa4y+qJyEQRWe7+rOt1TgAR6RyyHeeIyC4RuTbatrGIvCQim0VkQciyCrepiPyf+9leKiLDoijzIyKyRETmiciHIlLHXd5GRApDtvfoCp+4evNW+DmI4m38TkjeVSIyx10eDdu4ou+0yH+WVTVhLjhT/qwA2gFpwFygm9e5ysnZFOjnXq8FLAO6AXcC13udr5Lcq4AGZZY9DNzkXr8JeMjrnBV8LjYCraNtGwNHAP2ABfvapu5nZC6QDrR1P+vJUZL5BCDFvf5QSOY2oetF0TYu93MQzdu4zP2PAbdH0Tau6Dst4p/lRGvJDQJyVDVXVf3A28AojzP9jqpuUNVZ7vXdwGKgubepDtgo4H/u9f8Bp3oXpULHAitUtSpG0qlSqjoF2F5mcUXbdBTwtqoWqepKnHkaB1VHzlDlZVbVL1U14N78EWhR3bkqUsE2rkjUbuNSIiLA2cBb1RqqEpV8p0X8s5xoRa45sDbkdh5RXjxEpA3QF/jJXXSNu8vnpWjZ9RdCgS9FZKaIXOEua6zubO/uz0aepavYufz2CyGatzFUvE1j5fN9GTAh5HZbEZktIpNF5HCvQpWjvM9BLGzjw4FNqro8ZFnUbOMy32kR/ywnWpGTcpZF7TkUIlITeB+4VlV3Ac8B7YE+wAacXRLR5FBV7QecCFwtIkd4HWhfRCQNOAV4110U7du4MlH/+RaRW4AA8Ia7aAPQSlX7AtcBb4pIba/yhajocxD12xg4j9/+0xY127ic77QKVy1n2QFt50QrcnlAy5DbLYD1H
mWplIik4nwY3lDVDwBUdZOqBlW1BHgBD3aTVEZV17s/NwMf4uTbJCJNAdyfm71LWK4TgVmqugmifxu7KtqmUf35FpE/ACcDF6h74MXdHbXNvT4T59hLJ+9SOir5HET7Nk4BTgfeKV0WLdu4vO80quGznGhFbjrQUUTauv/BnwuM8zjT77j71P8LLFbVx0OWNw1Z7TRgQdnHekVEskSkVul1nI4GC3C27x/c1f4AfOxNwgr95r/eaN7GISrapuOAc0UkXUTaAh2Bnz3I9zsiMhy4EThFVX0hyxuKSLJ7vR1O5lxvUv6qks9B1G5j13HAElXNK10QDdu4ou80quOz7GWPGy8uwAicnj0rgFu8zlNBxsNwmubzgDnuZQTwGjDfXT4OaOp11pDM7XB6Q80FFpZuW6A+8DWw3P1Zz+usIZkzgW1AdsiyqNrGOAV4A1CM89/tHyvbpsAt7md7KXBiFGXOwTnGUvp5Hu2ue4b7eZkLzAJGRkneCj8H0bqN3eWvAFeWWTcatnFF32kR/yzbsF7GGGPiVqLtrjTGGJNArMgZY4yJW1bkjDHGxC0rcsYYY+KWFTljjDFxy4qcMVFMnJkRMr3OYUysslMIjIliIrIKGKCqW73OYkwsspacMVHCHTXmMxGZKyILROQOoBnwrYh8665zgoj8ICKzRORddyzA0rn8HhKRn91LB3f5We5zzRWRKd69O2O8YUXOmOgxHFivqr1VtQfwBM54fUer6tEi0gC4FThOnYGwZ+AMuFtql6oOAp52HwtwOzBMVXvjDERtTEKxImdM9JgPHOe2yA5X1fwy9w/GmUzye3fW5z/gTPRa6q2Qn0Pc698Dr4jIn3AmhzUmoaR4HcAY41DVZSLSH2dMvwdE5MsyqwgwUVXPq+gpyl5X1StF5BDgJGCOiPRRd0R6YxKBteSMiRIi0gzwqerrwKNAP2A3UMtd5Ufg0JDjbZkiEjplyjkhP39w12mvqj+p6u3AVn47fYkxcc9acsZEj57AIyJSgjO6/FU4ux0niMgG97jcJcBbIpLuPuZWnFk1ANJF5Cecf15LW3uPiEhHnFbg1zgj0RuTMOwUAmPigJ1qYEz5bHelMcaYuGUtOWOMMXHLWnLGGGPilhU5Y4wxccuKnDHGmLhlRc4YY0zcsiJnjDEmbv0/PZDGt4sF9o4AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "p = torch.nn.Parameter(torch.zeros(0))\n", + "opt = torch.optim.SGD([p], lr=5)\n", + "multiplier = WarmupParamScheduler(\n", + " CosineParamScheduler(0.1, 0.0001),\n", + " warmup_factor = 0.001,\n", + " warmup_length = 0.05,\n", + " warmup_method = 'linear'\n", + ")\n", + "total = 100\n", + "scheduler = LRMultiplier(opt, multiplier, total)\n", + "steps, lrs = [], []\n", + "\n", + "for _iter in range(total * 2):\n", + " p.sum().backward()\n", + " opt.step()\n", + " lrs.append(opt.param_groups[0][\"lr\"])\n", + " steps.append(_iter)\n", + "\n", + " scheduler.step()\n", + "draw(steps, lrs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.8" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/notebooks/LFNA/synthetic-data.ipynb b/notebooks/spaces-xmisc/synthetic-data.ipynb similarity index 100% rename from notebooks/LFNA/synthetic-data.ipynb rename to notebooks/spaces-xmisc/synthetic-data.ipynb diff --git a/notebooks/LFNA/synthetic-env.ipynb b/notebooks/spaces-xmisc/synthetic-env.ipynb similarity index 100% rename from notebooks/LFNA/synthetic-env.ipynb rename to notebooks/spaces-xmisc/synthetic-env.ipynb diff --git a/notebooks/LFNA/synthetic-visualize-env.ipynb b/notebooks/spaces-xmisc/synthetic-visualize-env.ipynb similarity index 100% rename from notebooks/LFNA/synthetic-visualize-env.ipynb rename to notebooks/spaces-xmisc/synthetic-visualize-env.ipynb diff --git a/notebooks/spaces/test-transformer-encoder.ipynb b/notebooks/spaces-xmisc/test-transformer-encoder.ipynb similarity index 100% rename from notebooks/spaces/test-transformer-encoder.ipynb rename to notebooks/spaces-xmisc/test-transformer-encoder.ipynb diff --git a/notebooks/spaces/test.py b/notebooks/spaces/test.py deleted file mode 100644 index 31932a5..0000000 --- a/notebooks/spaces/test.py +++ /dev/null @@ -1,76 +0,0 @@ -import os -import sys -import qlib -import pprint -import numpy as np -import pandas as pd - -from pathlib import Path -import torch - -__file__ = os.path.dirname(os.path.realpath("__file__")) - -lib_dir = (Path(__file__).parent / ".." 
/ "lib").resolve() -print("library path: {:}".format(lib_dir)) -assert lib_dir.exists(), "{:} does not exist".format(lib_dir) -if str(lib_dir) not in sys.path: - sys.path.insert(0, str(lib_dir)) - -from trade_models import get_transformer - -from qlib import config as qconfig -from qlib.utils import init_instance_by_config -from qlib.model.base import Model -from qlib.data.dataset import DatasetH -from qlib.data.dataset.handler import DataHandlerLP - -qlib.init(provider_uri="~/.qlib/qlib_data/cn_data", region=qconfig.REG_CN) - -dataset_config = { - "class": "DatasetH", - "module_path": "qlib.data.dataset", - "kwargs": { - "handler": { - "class": "Alpha360", - "module_path": "qlib.contrib.data.handler", - "kwargs": { - "start_time": "2008-01-01", - "end_time": "2020-08-01", - "fit_start_time": "2008-01-01", - "fit_end_time": "2014-12-31", - "instruments": "csi100", - }, - }, - "segments": { - "train": ("2008-01-01", "2014-12-31"), - "valid": ("2015-01-01", "2016-12-31"), - "test": ("2017-01-01", "2020-08-01"), - }, - }, -} -pprint.pprint(dataset_config) -dataset = init_instance_by_config(dataset_config) - -df_train, df_valid, df_test = dataset.prepare( - ["train", "valid", "test"], - col_set=["feature", "label"], - data_key=DataHandlerLP.DK_L, -) -model = get_transformer(None) -print(model) - -features = torch.from_numpy(df_train["feature"].values).float() -labels = torch.from_numpy(df_train["label"].values).squeeze().float() - -batch = list(range(2000)) -predicts = model(features[batch]) -mask = ~torch.isnan(labels[batch]) - -pred = predicts[mask] -label = labels[batch][mask] - -loss = torch.nn.functional.mse_loss(pred, label) - -from sklearn.metrics import mean_squared_error - -mse_loss = mean_squared_error(pred.numpy(), label.numpy()) diff --git a/scripts/experimental/train-vit.sh b/scripts/experimental/train-vit.sh index 4dc1772..b8aa85b 100644 --- a/scripts/experimental/train-vit.sh +++ b/scripts/experimental/train-vit.sh @@ -28,4 +28,4 @@ python ./exps/basic/xmain.py --save_dir ${save_dir} --rand_seed ${rseed} \ --model_config ./configs/yaml.model/vit-cifar10.s0 \ --optim_config ./configs/yaml.opt/vit.cifar \ --loss_config ./configs/yaml.loss/cross-entropy \ - --lr 0.003 --weight_decay 0.3 + --lr 0.003 --weight_decay 0.3 --scheduler warm-cos --steps 10000 diff --git a/tests/test_misc_scheduler.py b/tests/test_misc_scheduler.py new file mode 100644 index 0000000..bbf14b5 --- /dev/null +++ b/tests/test_misc_scheduler.py @@ -0,0 +1,73 @@ +#################################################### +# Copyright (c) Facebook, Inc. and its affiliates. 
# +#################################################### +# Inspired from https://github.com/facebookresearch/detectron2/blob/master/tests/test_scheduler.py +#################################################### +import math +import numpy as np +from unittest import TestCase + +import torch + +from xautodl.xmisc.scheduler_utils import CosineParamScheduler, MultiStepParamScheduler +from xautodl.xmisc.scheduler_utils import LRMultiplier, WarmupParamScheduler + + +class TestScheduler(TestCase): + """Test the scheduler.""" + + def test_warmup_multistep(self): + p = torch.nn.Parameter(torch.zeros(0)) + opt = torch.optim.SGD([p], lr=5) + + multiplier = WarmupParamScheduler( + MultiStepParamScheduler( + [1, 0.1, 0.01, 0.001], + milestones=[10, 15, 20], + num_updates=30, + ), + 0.001, + 5 / 30, + ) + sched = LRMultiplier(opt, multiplier, 30) + # This is an equivalent of: + # sched = WarmupMultiStepLR( + # opt, milestones=[10, 15, 20], gamma=0.1, warmup_factor=0.001, warmup_iters=5) + + p.sum().backward() + opt.step() + + lrs = [0.005] + for _ in range(30): + sched.step() + lrs.append(opt.param_groups[0]["lr"]) + self.assertTrue(np.allclose(lrs[:5], [0.005, 1.004, 2.003, 3.002, 4.001])) + self.assertTrue(np.allclose(lrs[5:10], 5.0)) + self.assertTrue(np.allclose(lrs[10:15], 0.5)) + self.assertTrue(np.allclose(lrs[15:20], 0.05)) + self.assertTrue(np.allclose(lrs[20:], 0.005)) + + def test_warmup_cosine(self): + p = torch.nn.Parameter(torch.zeros(0)) + opt = torch.optim.SGD([p], lr=5) + multiplier = WarmupParamScheduler( + CosineParamScheduler(1, 0), + 0.001, + 5 / 30, + ) + sched = LRMultiplier(opt, multiplier, 30) + + p.sum().backward() + opt.step() + self.assertEqual(opt.param_groups[0]["lr"], 0.005) + lrs = [0.005] + + for _ in range(30): + sched.step() + lrs.append(opt.param_groups[0]["lr"]) + for idx, lr in enumerate(lrs): + expected_cosine = 2.5 * (1.0 + math.cos(math.pi * idx / 30)) + if idx >= 5: + self.assertAlmostEqual(lr, expected_cosine) + else: + self.assertNotAlmostEqual(lr, expected_cosine) diff --git a/xautodl/xmisc/__init__.py b/xautodl/xmisc/__init__.py index dad05e2..f6a9ce9 100644 --- a/xautodl/xmisc/__init__.py +++ b/xautodl/xmisc/__init__.py @@ -10,3 +10,23 @@ from .yaml_utils import load_yaml from .torch_utils import count_parameters from .logger_utils import Logger + +# sampler +from .sampler_utils import BatchSampler + +# scheduler related +from .scheduler_utils import CosineParamScheduler, WarmupParamScheduler, LRMultiplier + + +def get_scheduler(indicator, lr): + if indicator == "warm-cos": + multiplier = WarmupParamScheduler( + CosineParamScheduler(lr, lr * 1e-3), + warmup_factor=0.001, + warmup_length=0.05, + warmup_method="linear", + ) + + else: + raise ValueError("Unknown indicator: {:}".format(indicator)) + return multiplier diff --git a/xautodl/xmisc/sampler_utils.py b/xautodl/xmisc/sampler_utils.py new file mode 100644 index 0000000..1ad9a00 --- /dev/null +++ b/xautodl/xmisc/sampler_utils.py @@ -0,0 +1,32 @@ +##################################################### +# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.06 # +##################################################### +import random + + +class BatchSampler: + """A batch sampler used for single machine training.""" + + def __init__(self, dataset, batch, steps): + self._num_per_epoch = len(dataset) + self._iter_per_epoch = self._num_per_epoch // batch + self._steps = steps + self._batch = batch + if self._num_per_epoch < self._batch: + raise ValueError( + "The dataset size must be larger than batch={:}".format(batch) 
+ ) + self._indexes = list(range(self._num_per_epoch)) + + def __iter__(self): + """ + yield a batch of indexes using random sampling + """ + for i in range(self._steps): + if i % self._iter_per_epoch == 0: + random.shuffle(self._indexes) + j = i % self._iter_per_epoch + yield self._indexes[j * self._batch : (j + 1) * self._batch] + + def __len__(self): + return self._steps diff --git a/xautodl/xmisc/scheduler_utils.py b/xautodl/xmisc/scheduler_utils.py index 4dc5ade..df0ffdf 100644 --- a/xautodl/xmisc/scheduler_utils.py +++ b/xautodl/xmisc/scheduler_utils.py @@ -1,136 +1,532 @@ -##################################################### -# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.06 # -##################################################### -from torch.optim.lr_scheduler import _LRScheduler +#################################################### +# Copyright (c) Facebook, Inc. and its affiliates. # +#################################################### +# Borrowed from https://github.com/facebookresearch/fvcore/blob/master/fvcore/common/param_scheduler.py +# and https://github.com/facebookresearch/detectron2/blob/master/detectron2/solver/lr_scheduler.py +#################################################### +import torch + +import bisect +import math +from typing import List, Optional, Sequence, Union + +__all__ = [ + "ParamScheduler", + "ConstantParamScheduler", + "CosineParamScheduler", + "ExponentialParamScheduler", + "LinearParamScheduler", + "CompositeParamScheduler", + "MultiStepParamScheduler", + "StepParamScheduler", + "StepWithFixedGammaParamScheduler", + "PolynomialDecayParamScheduler", + "WarmupParamScheduler", + "LRMultiplier", +] -class CosineDecayWithWarmup(_LRScheduler): - r"""Set the learning rate of each parameter group using a cosine annealing - schedule, where :math:`\eta_{max}` is set to the initial lr, :math:`T_{cur}` - is the number of epochs since the last restart and :math:`T_{i}` is the number - of epochs between two warm restarts in SGDR: - .. math:: - \eta_t = \eta_{min} + \frac{1}{2}(\eta_{max} - \eta_{min})\left(1 + - \cos\left(\frac{T_{cur}}{T_{i}}\pi\right)\right) - When :math:`T_{cur}=T_{i}`, set :math:`\eta_t = \eta_{min}`. - When :math:`T_{cur}=0` after restart, set :math:`\eta_t=\eta_{max}`. - It has been proposed in - `SGDR: Stochastic Gradient Descent with Warm Restarts`_. - Args: - optimizer (Optimizer): Wrapped optimizer. - T_0 (int): Number of iterations for the first restart. - T_mult (int, optional): A factor increases :math:`T_{i}` after a restart. Default: 1. - eta_min (float, optional): Minimum learning rate. Default: 0. - last_epoch (int, optional): The index of last epoch. Default: -1. - verbose (bool): If ``True``, prints a message to stdout for - each update. Default: ``False``. - .. _SGDR\: Stochastic Gradient Descent with Warm Restarts: - https://arxiv.org/abs/1608.03983 +class ParamScheduler: + """ + Base class for parameter schedulers. + A parameter scheduler defines a mapping from a progress value in [0, 1) to + a number (e.g. learning rate). + """ + + # To be used for comparisons with where + WHERE_EPSILON = 1e-6 + + def __call__(self, where: float) -> float: + """ + Get the value of the param for a given point at training. + + We update params (such as learning rate) based on the percent progress + of training completed. This allows a scheduler to be agnostic to the + exact length of a particular run (e.g. 120 epochs vs 90 epochs), as + long as the relative progress where params should be updated is the same. 
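+        For example, ``where=0.5`` always means the halfway point of training,
+        whether the full run lasts 90 epochs or 120.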
+        However, it assumes that the total length of training is known.
+
+        Args:
+            where: A float in [0,1) that represents how far training has progressed
+
+        """
+        raise NotImplementedError("Param schedulers must override __call__")
+
+
+class ConstantParamScheduler(ParamScheduler):
+    """
+    Returns a constant value for a param.
+    """
+
+    def __init__(self, value: float) -> None:
+        self._value = value
+
+    def __call__(self, where: float) -> float:
+        if where >= 1.0:
+            raise RuntimeError(
+                f"where in ParamScheduler must be in [0, 1]: got {where}"
+            )
+        return self._value
+
+
+class CosineParamScheduler(ParamScheduler):
+    """
+    Cosine decay or cosine warmup schedules based on start and end values.
+    The schedule is updated based on the fraction of training progress.
+    The schedule was proposed in 'SGDR: Stochastic Gradient Descent with
+    Warm Restarts' (https://arxiv.org/abs/1608.03983). Note that this class
+    only implements the cosine annealing part of SGDR, and not the restarts.
+
+    Example:
+
+        .. code-block:: python
+
+            CosineParamScheduler(start_value=0.1, end_value=0.0001)
+    """

     def __init__(
-        self, optimizer, T_0, T_mult=1, eta_min=0, last_epoch=-1, verbose=False
-    ):
-        if T_0 <= 0 or not isinstance(T_0, int):
-            raise ValueError("Expected positive integer T_0, but got {}".format(T_0))
-        if T_mult < 1 or not isinstance(T_mult, int):
-            raise ValueError("Expected integer T_mult >= 1, but got {}".format(T_mult))
-        self.T_0 = T_0
-        self.T_i = T_0
-        self.T_mult = T_mult
-        self.eta_min = eta_min
+        self,
+        start_value: float,
+        end_value: float,
+    ) -> None:
+        self._start_value = start_value
+        self._end_value = end_value

-        super(CosineDecayWithWarmup, self).__init__(optimizer, last_epoch, verbose)
+    def __call__(self, where: float) -> float:
+        return self._end_value + 0.5 * (self._start_value - self._end_value) * (
+            1 + math.cos(math.pi * where)
+        )

-        self.T_cur = self.last_epoch

-    def get_lr(self):
-        if not self._get_lr_called_within_step:
-            warnings.warn(
-                "To get the last learning rate computed by the scheduler, "
-                "please use `get_last_lr()`.",
-                UserWarning,

+class ExponentialParamScheduler(ParamScheduler):
+    """
+    Exponential schedule parameterized by a start value and decay.
+    The schedule is updated based on the fraction of training
+    progress, `where`, with the formula
+    `param_t = start_value * (decay ** where)`.
+
+    Example:
+
+        .. code-block:: python
+            ExponentialParamScheduler(start_value=2.0, decay=0.02)
+
+    Corresponds to a decreasing schedule with values in [2.0, 0.04).
+    """
+
+    def __init__(
+        self,
+        start_value: float,
+        decay: float,
+    ) -> None:
+        self._start_value = start_value
+        self._decay = decay
+
+    def __call__(self, where: float) -> float:
+        return self._start_value * (self._decay ** where)
+
+
+class LinearParamScheduler(ParamScheduler):
+    """
+    Linearly interpolates parameter between ``start_value`` and ``end_value``.
+    Can be used for either warmup or decay based on start and end values.
+    The schedule is updated after every train step by default.
+
+    Example:
+
+        .. code-block:: python
+
+            LinearParamScheduler(start_value=0.0001, end_value=0.01)
+
+    Corresponds to a linear increasing schedule with values in [0.0001, 0.01)
+    """
+
+    def __init__(
+        self,
+        start_value: float,
+        end_value: float,
+    ) -> None:
+        self._start_value = start_value
+        self._end_value = end_value
+
+    def __call__(self, where: float) -> float:
+        # interpolate between start and end values
+        return self._end_value * where + self._start_value * (1 - where)
+
+
+class MultiStepParamScheduler(ParamScheduler):
+    """
+    Takes a predefined schedule for a param value, and a list of epochs or steps
+    which stand for the upper boundary (excluded) of each range.
+
+    Example:
+
+        .. code-block:: python
+
+            MultiStepParamScheduler(
+                values=[0.1, 0.01, 0.001, 0.0001],
+                milestones=[30, 60, 80, 120]
+            )
+
+    Then the param value will be 0.1 for epochs 0-29, 0.01 for
+    epochs 30-59, 0.001 for epochs 60-79, 0.0001 for epochs 80-120.
+    Note that the length of values must be equal to the length of milestones
+    plus one.
+    """
+
+    def __init__(
+        self,
+        values: List[float],
+        num_updates: Optional[int] = None,
+        milestones: Optional[List[int]] = None,
+    ) -> None:
+        """
+        Args:
+            values: param value in each range
+            num_updates: the end of the last range. If None, will use ``milestones[-1]``
+            milestones: the boundary of each range. If None, will evenly split ``num_updates``
+
+        For example, all the following combinations define the same scheduler:
+
+        * num_updates=90, milestones=[30, 60], values=[1, 0.1, 0.01]
+        * num_updates=90, values=[1, 0.1, 0.01]
+        * milestones=[30, 60, 90], values=[1, 0.1, 0.01]
+        * milestones=[3, 6, 9], values=[1, 0.1, 0.01] (ParamScheduler is scale-invariant)
+        """
+        if num_updates is None and milestones is None:
+            raise ValueError("num_updates and milestones cannot both be None")
+        if milestones is None:
+            # Default equispaced drop_epochs behavior
+            milestones = []
+            step_width = math.ceil(num_updates / float(len(values)))
+            for idx in range(len(values) - 1):
+                milestones.append(step_width * (idx + 1))
+        else:
+            if not (
+                isinstance(milestones, Sequence)
+                and len(milestones) == len(values) - int(num_updates is not None)
+            ):
+                raise ValueError(
+                    "MultiStep scheduler requires a list of %d milestones"
+                    % (len(values) - int(num_updates is not None))
+                )
+
+        if num_updates is None:
+            num_updates, milestones = milestones[-1], milestones[:-1]
+        if num_updates < len(values):
+            raise ValueError(
+                "Total num_updates must be greater than length of param schedule"
             )
-        return [
-            self.eta_min
-            + (base_lr - self.eta_min)
-            * (1 + math.cos(math.pi * self.T_cur / self.T_i))
-            / 2
-            for base_lr in self.base_lrs
-        ]
+        self._param_schedule = values
+        self._num_updates = num_updates
+        self._milestones: List[int] = milestones

-    def step(self, epoch=None):
-        """Step could be called after every batch update
-        Example:
-        >>> scheduler = CosineDecayWithWarmup(optimizer, T_0, T_mult)
-        >>> iters = len(dataloader)
-        >>> for epoch in range(20):
-        >>>     for i, sample in enumerate(dataloader):
-        >>>         inputs, labels = sample['inputs'], sample['labels']
-        >>>         optimizer.zero_grad()
-        >>>         outputs = net(inputs)
-        >>>         loss = criterion(outputs, labels)
-        >>>         loss.backward()
-        >>>         optimizer.step()
-        >>>         scheduler.step(epoch + i / iters)
-        This function can be called in an interleaved way.
-        Example:
-        >>> scheduler = CosineDecayWithWarmup(optimizer, T_0, T_mult)
-        >>> for epoch in range(20):
-        >>>     scheduler.step()
-        >>> scheduler.step(26)
-        >>> scheduler.step() # scheduler.step(27), instead of scheduler(20)
-        """
-
-        if epoch is None and self.last_epoch < 0:
-            epoch = 0
-
-        if epoch is None:
-            epoch = self.last_epoch + 1
-            self.T_cur = self.T_cur + 1
-            if self.T_cur >= self.T_i:
-                self.T_cur = self.T_cur - self.T_i
-                self.T_i = self.T_i * self.T_mult
-        else:
-            if epoch < 0:
+        start_epoch = 0
+        for milestone in self._milestones:
+            # Do not exceed the total number of epochs
+            if milestone >= self._num_updates:
                 raise ValueError(
-                    "Expected non-negative epoch, but got {}".format(epoch)
+                    "Milestone must be smaller than total number of updates: "
+                    "num_updates=%d, milestone=%d" % (self._num_updates, milestone)
                 )
-            if epoch >= self.T_0:
-                if self.T_mult == 1:
-                    self.T_cur = epoch % self.T_0
-                else:
-                    n = int(
-                        math.log(
-                            (epoch / self.T_0 * (self.T_mult - 1) + 1), self.T_mult
-                        )
-                    )
-                    self.T_cur = epoch - self.T_0 * (self.T_mult ** n - 1) / (
-                        self.T_mult - 1
-                    )
-                    self.T_i = self.T_0 * self.T_mult ** (n)
-            else:
-                self.T_i = self.T_0
-                self.T_cur = epoch
-        self.last_epoch = math.floor(epoch)
+            # Must be in ascending order
+            if start_epoch >= milestone:
+                raise ValueError(
+                    "Milestones must be in increasing order: start_epoch=%d, milestone=%d"
+                    % (start_epoch, milestone)
+                )
+            start_epoch = milestone

-        class _enable_get_lr_call:
-            def __init__(self, o):
-                self.o = o
+    def __call__(self, where: float) -> float:
+        if where > 1.0:
+            raise RuntimeError(
+                f"where in ParamScheduler must be in [0, 1]: got {where}"
+            )
+        epoch_num = int((where + self.WHERE_EPSILON) * self._num_updates)
+        return self._param_schedule[bisect.bisect_right(self._milestones, epoch_num)]

-            def __enter__(self):
-                self.o._get_lr_called_within_step = True
-                return self
-            def __exit__(self, type, value, traceback):
-                self.o._get_lr_called_within_step = False
-                return self

+class PolynomialDecayParamScheduler(ParamScheduler):
+    """
+    Decays the param value after every epoch according to a
+    polynomial function with a fixed power.
+    The schedule is updated after every train step by default.

-        with _enable_get_lr_call(self):
-            for i, data in enumerate(zip(self.optimizer.param_groups, self.get_lr())):
-                param_group, lr = data
-                param_group["lr"] = lr
-                self.print_lr(self.verbose, i, lr, epoch)
+
+    Example:

-        self._last_lr = [group["lr"] for group in self.optimizer.param_groups]
+
+        .. code-block:: python
+
+            PolynomialDecayParamScheduler(base_value=0.1, power=0.9)
+
+    Then the param value will be 0.1 for epoch 0, 0.099 for epoch 1, and
+    so on.
+    """
+
+    def __init__(
+        self,
+        base_value: float,
+        power: float,
+    ) -> None:
+        self._base_value = base_value
+        self._power = power
+
+    def __call__(self, where: float) -> float:
+        return self._base_value * (1 - where) ** self._power
+
+
+class StepParamScheduler(ParamScheduler):
+    """
+    Takes a fixed schedule for a param value. If the length of the
+    fixed schedule is less than the number of epochs, then the epochs
+    are divided evenly among the param schedule.
+    The schedule is updated after every train epoch by default.
+
+    Example:
+
+        .. code-block:: python
+
+            StepParamScheduler(values=[0.1, 0.01, 0.001, 0.0001], num_updates=120)
+
+    Then the param value will be 0.1 for epochs 0-29, 0.01 for
+    epochs 30-59, 0.001 for epochs 60-89, 0.0001 for epochs 90-119.
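+
+    The lookup is bucketed: ``__call__`` returns
+    ``values[int((where + WHERE_EPSILON) * len(values))]``, so each value
+    covers an equal fraction of the progress interval [0, 1).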
+ """ + + def __init__( + self, + num_updates: Union[int, float], + values: List[float], + ) -> None: + if num_updates <= 0: + raise ValueError("Number of updates must be larger than 0") + if not (isinstance(values, Sequence) and len(values) > 0): + raise ValueError( + "Step scheduler requires a list of at least one param value" + ) + self._param_schedule = values + + def __call__(self, where: float) -> float: + ind = int((where + self.WHERE_EPSILON) * len(self._param_schedule)) + return self._param_schedule[ind] + + +class StepWithFixedGammaParamScheduler(ParamScheduler): + """ + Decays the param value by gamma at equal number of steps so as to have the + specified total number of decays. + + Example: + + .. code-block:: python + + StepWithFixedGammaParamScheduler( + base_value=0.1, gamma=0.1, num_decays=3, num_updates=120) + + Then the param value will be 0.1 for epochs 0-29, 0.01 for + epochs 30-59, 0.001 for epoch 60-89, 0.0001 for epochs 90-119. + """ + + def __init__( + self, + base_value: float, + num_decays: int, + gamma: float, + num_updates: int, + ) -> None: + for k in [base_value, gamma]: + if not (isinstance(k, (int, float)) and k > 0): + raise ValueError("base_value and gamma must be positive numbers") + for k in [num_decays, num_updates]: + if not (isinstance(k, int) and k > 0): + raise ValueError("num_decays and num_updates must be positive integers") + + self.base_value = base_value + self.num_decays = num_decays + self.gamma = gamma + self.num_updates = num_updates + values = [base_value] + for _ in range(num_decays): + values.append(values[-1] * gamma) + + self._step_param_scheduler = StepParamScheduler( + num_updates=num_updates, values=values + ) + + def __call__(self, where: float) -> float: + return self._step_param_scheduler(where) + + +class CompositeParamScheduler(ParamScheduler): + """ + Composite parameter scheduler composed of intermediate schedulers. + Takes a list of schedulers and a list of lengths corresponding to + percentage of training each scheduler should run for. Schedulers + are run in order. All values in lengths should sum to 1.0. + + Each scheduler also has a corresponding interval scale. If interval + scale is 'fixed', the intermediate scheduler will be run without any rescaling + of the time. If interval scale is 'rescaled', intermediate scheduler is + run such that each scheduler will start and end at the same values as it + would if it were the only scheduler. Default is 'rescaled' for all schedulers. + + Example: + + .. code-block:: python + + schedulers = [ + ConstantParamScheduler(value=0.42), + CosineParamScheduler(start_value=0.42, end_value=1e-4) + ] + CompositeParamScheduler( + schedulers=schedulers, + interval_scaling=['rescaled', 'rescaled'], + lengths=[0.3, 0.7]) + + The parameter value will be 0.42 for the first [0%, 30%) of steps, + and then will cosine decay from 0.42 to 0.0001 for [30%, 100%) of + training. 
+ """ + + def __init__( + self, + schedulers: Sequence[ParamScheduler], + lengths: List[float], + interval_scaling: Sequence[str], + ) -> None: + if len(schedulers) != len(lengths): + raise ValueError("Schedulers and lengths must be same length") + if len(schedulers) == 0: + raise ValueError( + "There must be at least one scheduler in the composite scheduler" + ) + if abs(sum(lengths) - 1.0) >= 1e-3: + raise ValueError("The sum of all values in lengths must be 1") + if sum(lengths) != 1.0: + lengths[-1] = 1.0 - sum(lengths[:-1]) + for s in interval_scaling: + if s not in ["rescaled", "fixed"]: + raise ValueError(f"Unsupported interval_scaling: {s}") + + self._lengths = lengths + self._schedulers = schedulers + self._interval_scaling = interval_scaling + + def __call__(self, where: float) -> float: + # Find scheduler corresponding to where + i = 0 + running_total = self._lengths[i] + while (where + self.WHERE_EPSILON) > running_total and i < len( + self._schedulers + ) - 1: + i += 1 + running_total += self._lengths[i] + scheduler = self._schedulers[i] + scheduler_where = where + interval_scale = self._interval_scaling[i] + if interval_scale == "rescaled": + # Calculate corresponding where % for scheduler + scheduler_start = running_total - self._lengths[i] + scheduler_where = (where - scheduler_start) / self._lengths[i] + return scheduler(scheduler_where) + + +class WarmupParamScheduler(CompositeParamScheduler): + """ + Add an initial warmup stage to another scheduler. + """ + + def __init__( + self, + scheduler: ParamScheduler, + warmup_factor: float, + warmup_length: float, + warmup_method: str = "linear", + ): + """ + Args: + scheduler: warmup will be added at the beginning of this scheduler + warmup_factor: the factor w.r.t the initial value of ``scheduler``, e.g. 0.001 + warmup_length: the relative length (in [0, 1]) of warmup steps w.r.t the entire + training, e.g. 0.01 + warmup_method: one of "linear" or "constant" + """ + end_value = scheduler(warmup_length) # the value to reach when warmup ends + start_value = warmup_factor * scheduler(0.0) + if warmup_method == "constant": + warmup = ConstantParamScheduler(start_value) + elif warmup_method == "linear": + warmup = LinearParamScheduler(start_value, end_value) + else: + raise ValueError("Unknown warmup method: {}".format(warmup_method)) + super().__init__( + [warmup, scheduler], + interval_scaling=["rescaled", "fixed"], + lengths=[warmup_length, 1 - warmup_length], + ) + + +##### LR Scheduler + + +class LRMultiplier(torch.optim.lr_scheduler._LRScheduler): + """ + A LRScheduler which uses fvcore :class:`ParamScheduler` to multiply the + learning rate of each param in the optimizer. + Every step, the learning rate of each parameter becomes its initial value + multiplied by the output of the given :class:`ParamScheduler`. + The absolute learning rate value of each parameter can be different. + This scheduler can be used as long as the relative scale among them do + not change during training. + Examples: + :: + LRMultiplier( + opt, + WarmupParamScheduler( + MultiStepParamScheduler( + [1, 0.1, 0.01], + milestones=[60000, 80000], + num_updates=90000, + ), 0.001, 100 / 90000 + ), + max_iter=90000 + ) + """ + + # NOTES: in the most general case, every LR can use its own scheduler. + # Supporting this requires interaction with the optimizer when its parameter + # group is initialized. For example, classyvision implements its own optimizer + # that allows different schedulers for every parameter group. 
+ # To avoid this complexity, we use this class to support the most common cases + # where the relative scale among all LRs stay unchanged during training. In this + # case we only need a total of one scheduler that defines the relative LR multiplier. + + def __init__( + self, + optimizer: torch.optim.Optimizer, + multiplier: ParamScheduler, + max_iter: int, + last_iter: int = -1, + ): + """ + Args: + optimizer, last_iter: See ``torch.optim.lr_scheduler._LRScheduler``. + ``last_iter`` is the same as ``last_epoch``. + multiplier: a fvcore ParamScheduler that defines the multiplier on + every LR of the optimizer + max_iter: the total number of training iterations + """ + if not isinstance(multiplier, ParamScheduler): + raise ValueError( + "_LRMultiplier(multiplier=) must be an instance of fvcore " + f"ParamScheduler. Got {multiplier} instead." + ) + self._multiplier = multiplier + self._max_iter = max_iter + super().__init__(optimizer, last_epoch=last_iter) + + def state_dict(self): + # fvcore schedulers are stateless. Only keep pytorch scheduler states + return {"base_lrs": self.base_lrs, "last_epoch": self.last_epoch} + + def get_lr(self) -> List[float]: + multiplier = self._multiplier(self.last_epoch / self._max_iter) + return [base_lr * multiplier for base_lr in self.base_lrs] diff --git a/xautodl/xmodels/__init__.py b/xautodl/xmodels/__init__.py index e44777d..04f21fe 100644 --- a/xautodl/xmodels/__init__.py +++ b/xautodl/xmodels/__init__.py @@ -5,6 +5,3 @@ ##################################################### from .transformers import get_transformer - -def obtain_model(config): - raise NotImplementedError
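For reference, a minimal end-to-end sketch of the utilities added above (assumptions: a one-element dummy parameter stands in for a model, and the optimizer's base LR is set to 1.0 so that the "warm-cos" multiplier acts as the absolute learning rate; lr=0.003 and steps=10000 mirror scripts/experimental/train-vit.sh):

    import torch
    from xautodl import xmisc

    # Dummy parameter in place of a real model.
    param = torch.nn.Parameter(torch.zeros(1))
    # Base LR 1.0: the ParamScheduler output then IS the learning rate.
    optimizer = torch.optim.SGD([param], lr=1.0)

    total_steps = 10000
    # "warm-cos": 5% linear warmup, then cosine decay from lr down to lr * 1e-3.
    multiplier = xmisc.get_scheduler("warm-cos", 0.003)
    scheduler = xmisc.LRMultiplier(optimizer, multiplier, total_steps)

    for step in range(total_steps):
        param.sum().backward()
        optimizer.step()
        optimizer.zero_grad()
        scheduler.step()  # advances where = step / total_steps

    # BatchSampler yields exactly `steps` batches, reshuffling once per epoch.
    dataset = list(range(100))
    sampler = xmisc.BatchSampler(dataset, 8, total_steps)
    assert len(sampler) == total_steps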