layers -> xlayers

D-X-Y 2021-03-18 20:15:50 +08:00
parent eabdd21d97
commit badb6cf51d
16 changed files with 140 additions and 30 deletions

View File

@@ -43,5 +43,5 @@ jobs:
         echo "Show what we have here:"
         ls
         python --version
-        python -m pytest ./tests -s
+        python -m pytest ./tests/test_basic_space.py -s
       shell: bash

.github/workflows/super_model_test.yml (new file, 32 lines)
View File

@@ -0,0 +1,32 @@
name: Run Python Tests for Super Model

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  build:
    strategy:
      matrix:
        os: [ubuntu-16.04, ubuntu-18.04, ubuntu-20.04, macos-latest]
        python-version: [3.6, 3.7, 3.8, 3.9]

    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Test Super Model
        run: |
          python -m pip install pytest numpy
          python -m pip install torch torchvision torchaudio
          python -m pytest ./tests/test_super_model.py -s
        shell: bash

View File

@@ -1,5 +0,0 @@
-from .drop import DropBlock2d, DropPath
-from .mlp import MLP
-from .weight_init import trunc_normal_
-from .positional_embedding import PositionalEncoder

View File

@@ -7,6 +7,8 @@
 from .basic_space import Categorical
 from .basic_space import Continuous
 from .basic_space import Integer
+from .basic_space import Space
+from .basic_space import VirtualNode
 from .basic_op import has_categorical
 from .basic_op import has_continuous
 from .basic_op import get_min

View File

@@ -7,6 +7,7 @@ import math
 import copy
 import random
 import numpy as np
+from collections import OrderedDict
 from typing import Optional
@@ -44,6 +45,32 @@ class Space(metaclass=abc.ABCMeta):
         return copy.deepcopy(self)


+class VirtualNode(Space):
+    """For a nested search space, we represent it as a tree structure.
+    For example,
+    """
+
+    def __init__(self, id=None, value=None):
+        self._id = id
+        self._value = value
+        self._attributes = OrderedDict()
+
+    def has(self, x):
+        for key, value in self._attributes.items():
+            if isinstance(value, Space) and value.has(x):
+                return True
+        return False
+
+    def __repr__(self):
+        strs = [self.__class__.__name__ + "("]
+        indent = " " * 4
+        for key, value in self._attributes.items():
+            strs.append(indent + str(value))
+        strs.append(")")
+        return "\n".join(strs)
+
+
 class Categorical(Space):
     """A space contains the categorical values.
     It can be a nested space, which means that the candidate in this space can also be a search space.

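Note: the VirtualNode docstring above stops at "For example,". Below is a minimal usage sketch of the class exactly as it appears in this hunk; it is not part of the commit. It assumes that lib/ is on sys.path (as the tests arrange) and that Categorical implements the has() lookup that VirtualNode.has() delegates to; since no public setter is shown in this hunk, the direct writes to _attributes are illustrative assumptions only.

import spaces

# Build a tiny nested space: a VirtualNode whose children are Categorical spaces.
node = spaces.VirtualNode(id=0)
node._attributes["operator"] = spaces.Categorical("conv", "pool")  # assumed: no public setter is shown
node._attributes["width"] = spaces.Categorical(12, 24, 36)

print(node.has("conv"))  # True if a child space contains "conv"
print(node.has(48))      # False: no child space contains 48
print(node)              # multi-line summary from VirtualNode.__repr__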
View File

@@ -12,7 +12,7 @@ import torch
 import torch.nn as nn
 import torch.nn.functional as F
-import layers as xlayers
+import xlayers

 DEFAULT_NET_CONFIG = dict(

lib/xlayers/__init__.py (new file, 11 lines)
View File

@@ -0,0 +1,11 @@
#####################################################
# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2019.01 #
#####################################################
# This file is expected to be self-contained, except
# for importing from spaces to include search space. #
#####################################################
from .drop import DropBlock2d, DropPath
from .mlp import MLP
from .weight_init import trunc_normal_
from .positional_embedding import PositionalEncoder

View File

@@ -1,16 +1,15 @@
 #####################################################
 # Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.03 #
 #####################################################
+import torch
 import torch.nn as nn
-from torch.nn.parameter import Parameter
-from torch import Tensor
 import math
 from typing import Optional, Union

 import spaces
-from layers.super_module import SuperModule
-from layers.super_module import SuperRunType
+from .super_module import SuperModule
+from .super_module import SuperRunMode

 IntSpaceType = Union[int, spaces.Integer, spaces.Categorical]
 BoolSpaceType = Union[bool, spaces.Categorical]

@@ -32,11 +31,11 @@ class SuperLinear(SuperModule):
         self._out_features = out_features
         self._bias = bias

-        self._super_weight = Parameter(
+        self._super_weight = torch.nn.Parameter(
             torch.Tensor(self.out_features, self.in_features)
         )
-        if bias:
-            self._super_bias = Parameter(torch.Tensor(self.out_features))
+        if self.bias:
+            self._super_bias = torch.nn.Parameter(torch.Tensor(self.out_features))
         else:
             self.register_parameter("_super_bias", None)
         self.reset_parameters()

@@ -53,6 +52,9 @@ class SuperLinear(SuperModule):
     def bias(self):
         return spaces.has_categorical(self._bias, True)

+    def abstract_search_space(self):
+        print('-')
+
     def reset_parameters(self) -> None:
         nn.init.kaiming_uniform_(self._super_weight, a=math.sqrt(5))
         if self.bias:

@@ -60,7 +62,7 @@ class SuperLinear(SuperModule):
             bound = 1 / math.sqrt(fan_in)
             nn.init.uniform_(self._super_bias, -bound, bound)

-    def forward_raw(self, input: Tensor) -> Tensor:
+    def forward_raw(self, input: torch.Tensor) -> torch.Tensor:
         return F.linear(input, self._super_weight, self._super_bias)

     def extra_repr(self) -> str:

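The abstract_search_space method added above is still a placeholder (it only prints '-'). As a speculative sketch only, not the commit's implementation, the block below shows how such a method could gather the layer's searchable hyperparameters into the VirtualNode container introduced in lib/spaces; self._in_features is assumed by symmetry with the shown self._out_features, and the direct write to _attributes is an assumption because VirtualNode exposes no public setter in this commit.

import spaces

def abstract_search_space(self):
    # Speculative: collect every constructor argument that is itself a search space.
    root = spaces.VirtualNode(id=id(self))
    candidates = {
        "_in_features": self._in_features,    # assumed attribute, mirroring _out_features
        "_out_features": self._out_features,
        "_bias": self._bias,
    }
    for name, value in candidates.items():
        if isinstance(value, spaces.Space):   # keep only entries that are Space instances
            root._attributes[name] = value    # assumed direct write; no setter shown
    return root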
View File

@@ -14,12 +14,12 @@ class SuperRunMode(Enum):
     Default = "fullmodel"


-class SuperModule(abc.ABCMeta, nn.Module):
+class SuperModule(abc.ABC, nn.Module):
     """This class equips the nn.Module class with the ability to apply AutoDL."""

     def __init__(self):
         super(SuperModule, self).__init__()
-        self._super_run_type = SuperRunMode.default
+        self._super_run_type = SuperRunMode.Default

     @abc.abstractmethod
     def abstract_search_space(self):

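Two small changes above explain the traceback captured in the notebook diff below: Enum members are looked up case-sensitively, so SuperRunMode.default raises AttributeError while SuperRunMode.Default resolves, and abc.ABC (a regular base class whose metaclass is ABCMeta) is the correct base, not abc.ABCMeta itself. The sketch below is self-contained and uses only the standard library; the FullModel member is taken from the old notebook output, the rest is illustrative.

import abc
from enum import Enum


class SuperRunMode(Enum):
    FullModel = "fullmodel"
    Default = "fullmodel"            # same value, so Default is an alias of FullModel


print(SuperRunMode.Default)          # SuperRunMode.FullModel (alias resolves to the canonical member)
try:
    _ = SuperRunMode.default         # lowercase name is not a member
except AttributeError as err:
    print("AttributeError:", err)    # on Python 3.8 the message is just "default", as in the traceback


class Example(abc.ABC):              # inherit from abc.ABC, not from the metaclass abc.ABCMeta
    @abc.abstractmethod
    def abstract_search_space(self):
        raise NotImplementedError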
View File

@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [
     {

@@ -31,29 +31,42 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "SuperRunType.FullModel\n",
-      "SuperRunType.FullModel\n",
-      "True\n",
-      "True\n"
+     "ename": "AttributeError",
+     "evalue": "default",
+     "output_type": "error",
+     "traceback": [
+      "---------------------------------------------------------------------------",
+      "AttributeError                            Traceback (most recent call last)",
+      "~/Desktop/XAutoDL/notebooks/spaces in <module>\n      6 out_features = spaces.Categorical(12, 24, 36)\n      7 bias = spaces.Categorical(True, False)\n----> 8 model = SuperLinear(10, out_features, bias=bias)\n      9 print(model)\n",
+      "~/Desktop/XAutoDL/lib/layers/super_mlp.py in __init__(self, in_features, out_features, bias)\n     26         bias: BoolSpaceType = True,\n     27     ) -> None:\n---> 28         super(SuperLinear, self).__init__()\n     29 \n     30         # the raw input args\n",
+      "~/Desktop/XAutoDL/lib/layers/super_module.py in __init__(self)\n     20     def __init__(self):\n     21         super(SuperModule, self).__init__()\n---> 22         self._super_run_type = SuperRunMode.default\n     23 \n     24     @abc.abstractmethod\n",
+      "~/anaconda3/lib/python3.8/enum.py in __getattr__(cls, name)\n    339             return cls._member_map_[name]\n    340         except KeyError:\n--> 341             raise AttributeError(name) from None\n    342 \n    343     def __getitem__(cls, name):\n",
+      "AttributeError: default"
      ]
     }
    ],
    "source": [
+    "# Test the Linear layer\n",
+    "import spaces\n",
     "from layers.super_core import SuperLinear\n",
     "from layers.super_module import SuperRunMode\n",
     "\n",
-    "print(SuperRunMode.Default)\n",
-    "print(SuperRunMode.FullModel)\n",
-    "print(SuperRunMode.Default == SuperRunMode.FullModel)\n",
-    "print(SuperRunMode.FullModel == SuperRunMode.FullModel)"
+    "out_features = spaces.Categorical(12, 24, 36)\n",
+    "bias = spaces.Categorical(True, False)\n",
+    "model = SuperLinear(10, out_features, bias=bias)\n",
+    "print(model)"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
 "metadata": {

tests/test_super_model.py (new file, 28 lines)
View File

@@ -0,0 +1,28 @@
#####################################################
# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.03 #
#####################################################
# pytest ./tests/test_super_model.py -s             #
#####################################################
import sys, random
import unittest
import pytest
from pathlib import Path

lib_dir = (Path(__file__).parent / ".." / "lib").resolve()
print("library path: {:}".format(lib_dir))
if str(lib_dir) not in sys.path:
    sys.path.insert(0, str(lib_dir))

import torch
from xlayers import super_core
import spaces


class TestSuperLinear(unittest.TestCase):
    """Test the super linear."""

    def test_super_linear(self):
        out_features = spaces.Categorical(12, 24, 36)
        bias = spaces.Categorical(True, False)
        model = super_core.SuperLinear(10, out_features, bias=bias)
        print(model)