update README

This commit is contained in:
D-X-Y 2019-10-04 23:32:19 +10:00
parent f8f3f382e0
commit 1dee8c014a
4 changed files with 8 additions and 62 deletions

View File

@ -1,6 +1,6 @@
# Neural Architecture Search
This project contains the following neural architecture search algorithms, implemented in [PyTorch](http://pytorch.org).
This project contains the following neural architecture search algorithms, implemented in [PyTorch](http://pytorch.org). More NAS resources can be found in [Awesome-NAS](https://github.com/D-X-Y/Awesome-NAS).
- Network Pruning via Transformable Architecture Search, NeurIPS 2019
- One-Shot Neural Architecture Search via Self-Evaluated Template Network, ICCV 2019
@ -20,6 +20,7 @@ In this paper, we proposed a differentiable searching strategy for transformable
<img src="https://d-x-y.github.com/resources/paper-icon/NIPS-2019-TAS.png" width="700">
### Usage
Use `bash ./scripts/prepare.sh` to prepare data splits for `CIFAR-10`, `CIFAR-100`, and `ILSVRC2012`.
@ -50,6 +51,7 @@ Highlight: we equip one-shot NAS with an architecture sampler and train network
<img src="https://d-x-y.github.com/resources/paper-icon/ICCV-2019-SETN.png" width="450">
### Usage
Please use the following scripts to train the searched SETN-searched CNN on CIFAR-10, CIFAR-100, and ImageNet.
```
CUDA_VISIBLE_DEVICES=0 bash ./scripts/nas-infer-train.sh cifar10 SETN 96 -1
@ -81,6 +83,7 @@ Searching codes come soon!
# Citation
If you find that this project helps your research, please consider citing some of the following papers:
```
@inproceedings{dong2019tas,

View File

@ -10,6 +10,7 @@ from copy import deepcopy
from pathlib import Path
lib_dir = (Path(__file__).parent / '..' / 'lib').resolve()
print ('lib_dir : {:}'.format(lib_dir))
if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir))
from config_utils import load_config, configure2str, obtain_search_single_args as obtain_args
from procedures import prepare_seed, prepare_logger, save_checkpoint, copy_checkpoint

View File

@ -1,50 +0,0 @@
#!/bin/bash
# Train a student CNN with knowledge distillation (KD) from a pre-trained teacher.
# Usage  : bash ./scripts/KD-train.sh <dataset> <model> <teacher> <KD-alpha> <KD-temperature> <random-seed>
# Example: bash ./scripts/KD-train.sh cifar10 ResNet110 ResNet110 0.5 1 -1
# Requires the TORCH_HOME environment variable (data directory and teacher checkpoints).
echo "script name: $0"
echo "$# arguments"
if [[ "$#" -ne 6 ]]; then
  echo "Input illegal number of parameters $#"
  echo "Need 6 parameters for the dataset / the-model-name / the-teacher-path / KD-alpha / KD-temperature / the-random-seed"
  exit 1
fi
# Guard against an unset *or* empty TORCH_HOME (the old '= ""' test missed unset under set -u).
if [[ -z "${TORCH_HOME:-}" ]]; then
  echo "Must set TORCH_HOME environment variable for data dir saving"
  exit 1
else
  echo "TORCH_HOME : $TORCH_HOME"
fi
dataset=$1
model=$2
teacher=$3
alpha=$4
temperature=$5
rseed=$6
# Fixed training schedule: 300 epochs, L1 learning-rate config, batch size 256.
epoch=E300
LR=L1
batch=256
save_dir=./output/KD/${dataset}-${teacher}.2.${model}-${alpha}-${temperature}
# Start from a clean output directory; '--' stops option parsing for safety.
rm -rf -- "${save_dir}"
# Prefer the cluster-local virtualenv python when present; fall back to PATH python.
PY_C="./env/bin/python"
if [[ ! -f "${PY_C}" ]]; then
  echo "Local Run with Python: $(command -v python)"
  PY_C="python"
else
  echo "Cluster Run with Python: ${PY_C}"
fi
"${PY_C}" --version
"${PY_C}" ./exps/KD-main.py --dataset "${dataset}" \
  --data_path "$TORCH_HOME/cifar.python" \
  --model_config "./configs/archs/CIFAR-${model}.config" \
  --optim_config "./configs/opts/CIFAR-${epoch}-W5-${LR}-COS.config" \
  --KD_checkpoint "$TORCH_HOME/TAS-checkpoints/basemodels/${dataset}/${teacher}.pth" \
  --procedure Simple-KD \
  --save_dir "${save_dir}" \
  --KD_alpha "${alpha}" --KD_temperature "${temperature}" \
  --cutout_length -1 \
  --batch_size "${batch}" --rand_seed "${rseed}" --workers 4 \
  --eval_frequency 1 --print_freq 100 --print_freq_eval 200

View File

@ -22,21 +22,13 @@ batch=$5
rseed=$6
PY_C="./env/bin/python"
if [ ! -f ${PY_C} ]; then
echo "Local Run with Python: "`which python`
PY_C="python"
SAVE_ROOT="./output"
else
echo "Cluster Run with Python: "${PY_C}
SAVE_ROOT="./hadoop-data/SearchCheckpoints"
fi
SAVE_ROOT="./output"
save_dir=${SAVE_ROOT}/basic/${dataset}/${model}-${epoch}-${LR}-${batch}
${PY_C} --version
python --version
${PY_C} ./exps/basic-main.py --dataset ${dataset} \
python ./exps/basic-main.py --dataset ${dataset} \
--data_path $TORCH_HOME/cifar.python \
--model_config ./configs/archs/CIFAR-${model}.config \
--optim_config ./configs/opts/CIFAR-${epoch}-W5-${LR}-COS.config \