Maintained by Difan Deng and Marius Lindauer.
The following list considers papers related to neural architecture search. It is by no means complete. If a paper you expect is missing from the list, please let us know.
Please note that although NAS methods steadily improve, the quality of empirical evaluations in this field is still lagging behind other areas of machine learning, AI and optimization. We would therefore like to share some best practices for empirical evaluations of NAS methods, which we believe will facilitate sustained and measurable progress in the field. If you are interested in a teaser, please read our blog post or jump directly to our checklist.
Transformers have gained increasing popularity in different domains. For a comprehensive list of papers focusing on Neural Architecture Search for Transformer-based spaces, the awesome-transformer-search repo is all you need.
2021
Sun, Y; Sun, X; Fang, Y; Yen, G G; Liu, Y
A Novel Training Protocol for Performance Predictors of Evolutionary Neural Architecture Search Algorithms Journal Article
In: IEEE Transactions on Evolutionary Computation, pp. 1-1, 2021.
@article{9336721,
title = {A Novel Training Protocol for Performance Predictors of Evolutionary Neural Architecture Search Algorithms},
author = {Y Sun and X Sun and Y Fang and G G Yen and Y Liu},
url = {https://ieeexplore.ieee.org/document/9336721},
doi = {10.1109/TEVC.2021.3055076},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
pages = {1-1},
abstract = {Evolutionary Neural Architecture Search (ENAS) can automatically design the architectures of Deep Neural Networks (DNNs) using evolutionary computation algorithms. However, most ENAS algorithms require intensive computational resources, which are not necessarily available to interested users. Performance predictors are a type of regression model that can assist in accomplishing the search without exerting much computational resource. Although various performance predictors have been designed, they employ the same training protocol to build the regression models: 1) sampling a set of DNNs with their performance as the training dataset, 2) training the model with the mean square error criterion, and 3) predicting the performance of DNNs newly generated during the ENAS. In this paper, we point out through intuitive and illustrative examples that the three steps constituting this training protocol are not well thought out. Furthermore, we propose a new training protocol to address these issues, consisting of designing a pairwise ranking indicator to construct the training target, using logistic regression to fit the training samples, and developing a differential method to build the training instances. To verify the effectiveness of the proposed training protocol, four widely used regression models from the field of machine learning were chosen to perform comparisons on two benchmark datasets. The experimental results of all the comparisons demonstrate that the proposed training protocol can significantly improve the performance prediction accuracy over traditional training protocols.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
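The pairwise training protocol sketched in the abstract above translates naturally into code. The following minimal sketch is our own illustration, not the authors' implementation: it builds differential training instances x_i - x_j, labels them by which architecture performed better, and fits a logistic-regression ranker; the architecture encodings and accuracies are synthetic placeholders.

import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 16))         # hypothetical architecture encodings
y = rng.uniform(0.80, 0.95, size=100)  # hypothetical validation accuracies

# Differential instances: the feature difference x_i - x_j, labelled 1
# if architecture i outperformed architecture j.
pairs = [X[i] - X[j] for i in range(100) for j in range(100) if i != j]
labels = [int(y[i] > y[j]) for i in range(100) for j in range(100) if i != j]
ranker = LogisticRegression(max_iter=1000).fit(np.array(pairs), np.array(labels))

def rank_score(candidate):
    # Average probability that the candidate beats each reference architecture.
    return ranker.predict_proba(candidate - X)[:, 1].mean()

candidates = rng.normal(size=(10, 16))
best = max(candidates, key=rank_score)  # rank newly generated DNN encodings

Note that the ranker never predicts absolute accuracy; it only orders architectures, which is all an evolutionary search needs for selection.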
Turner, Jack; Crowley, Elliot J; O'Boyle, Michael F P
Neural Architecture Search as Program Transformation Exploration Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2102-06599,
title = {Neural Architecture Search as Program Transformation Exploration},
author = {Jack Turner and Elliot J Crowley and Michael F P O'Boyle},
url = {https://arxiv.org/abs/2102.06599},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2102.06599},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Lopes, Vasco; Alirezazadeh, Saeid; Alexandre, Luís A.
EPE-NAS: Efficient Performance Estimation Without Training for Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2102-08099,
title = {EPE-NAS: Efficient Performance Estimation Without Training for Neural Architecture Search},
author = {Vasco Lopes and Saeid Alirezazadeh and Luís A. Alexandre},
url = {https://arxiv.org/abs/2102.08099},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2102.08099},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Yan, Shen; Song, Kaiqiang; Liu, Fei; Zhang, Mi
CATE: Computation-aware Neural Architecture Encoding with Transformers Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2102-07108,
title = {CATE: Computation-aware Neural Architecture Encoding with Transformers},
author = {Shen Yan and Kaiqiang Song and Fei Liu and Mi Zhang},
url = {https://arxiv.org/abs/2102.07108},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2102.07108},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Calisto, Maria Baldeon G; Lai-Yuen, Susana K
EMONAS: efficient multiobjective neural architecture search framework for 3D medical image segmentation Proceedings Article
In: Išgum, Ivana; Landman, Bennett A (Ed.): Medical Imaging 2021: Image Processing, pp. 22 – 34, International Society for Optics and Photonics (SPIE), 2021.
@inproceedings{10.1117/12.2577088,
title = {EMONAS: efficient multiobjective neural architecture search framework for 3D medical image segmentation},
author = {Maria Baldeon G Calisto and Susana K Lai-Yuen},
editor = {Ivana Išgum and Bennett A Landman},
url = {https://doi.org/10.1117/12.2577088},
doi = {10.1117/12.2577088},
year = {2021},
date = {2021-01-01},
booktitle = {Medical Imaging 2021: Image Processing},
volume = {11596},
pages = {22 -- 34},
publisher = {SPIE},
organization = {International Society for Optics and Photonics},
abstract = {Deep learning plays a critical role in medical image segmentation. Nevertheless, manually designing a neural network for a specific segmentation problem is a very difficult and time-consuming task due to the massive hyperparameter search space, long training time and large volumetric data. Therefore, most designed networks are highly complex, task specific and over-parametrized. Recently, multiobjective neural architecture search (NAS) methods have been proposed to automate the design of accurate and efficient segmentation architectures. However, they only search for either the macro- or micro-structure of the architecture, do not use the information produced during the optimization process to increase the efficiency of the search, and do not consider the volumetric nature of medical images. In this work, we propose EMONAS, an Efficient MultiObjective Neural Architecture Search framework for 3D medical image segmentation. EMONAS is composed of a search space that considers both the macro- and micro-structure of the architecture, and a surrogate-assisted multiobjective evolutionary algorithm that efficiently searches for the best hyperparameters using a Random Forest surrogate and guiding selection probabilities. EMONAS is evaluated on the task of cardiac segmentation from the ACDC MICCAI challenge. The architecture found is ranked within the top 10 submissions on all evaluation metrics, performing better than or comparably to other approaches while reducing the search time by more than 50% and having considerably fewer parameters.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
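As a companion to the abstract above, here is a heavily simplified sketch of the Random Forest surrogate loop; the assumptions are ours: a toy fitness function stands in for training a 3D segmentation network, and plain Gaussian mutation replaces the paper's evolutionary operators. Most offspring are screened by the surrogate, and only the most promising ones are truly evaluated.

import numpy as np
from sklearn.ensemble import RandomForestRegressor

rng = np.random.default_rng(0)

def true_fitness(h):                        # placeholder for train-and-evaluate
    return -np.sum((h - 0.5) ** 2)

pop = rng.uniform(0, 1, size=(8, 5))        # 8 candidates, 5 hyperparameters
fits = np.array([true_fitness(h) for h in pop])

for generation in range(10):
    surrogate = RandomForestRegressor(n_estimators=50).fit(pop, fits)
    offspring = np.clip(pop + rng.normal(0, 0.1, pop.shape), 0, 1)
    # Surrogate pre-screening: fully evaluate only the top-2 offspring.
    promising = offspring[np.argsort(surrogate.predict(offspring))[-2:]]
    pop = np.vstack([pop, promising])
    fits = np.append(fits, [true_fitness(h) for h in promising])
    survivors = np.argsort(fits)[-8:]       # keep the 8 fittest
    pop, fits = pop[survivors], fits[survivors]

print("best hyperparameters found:", np.round(pop[np.argmax(fits)], 2))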
Robbiano, Luca; Rahman, Muhammad Rameez Ur; Galasso, Fabio; Caputo, Barbara; Carlucci, Fabio Maria
Adversarial Branch Architecture Search for Unsupervised Domain Adaptation Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2102-06679,
title = {Adversarial Branch Architecture Search for Unsupervised Domain Adaptation},
author = {Luca Robbiano and Muhammad Rameez Ur Rahman and Fabio Galasso and Barbara Caputo and Fabio Maria Carlucci},
url = {https://arxiv.org/abs/2102.06679},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2102.06679},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wang, Dilin; Gong, Chengyue; Li, Meng; Liu, Qiang; Chandra, Vikas
AlphaNet: Improved Training of Supernet with Alpha-Divergence Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2102-07954,
title = {AlphaNet: Improved Training of Supernet with Alpha-Divergence},
author = {Dilin Wang and Chengyue Gong and Meng Li and Qiang Liu and Vikas Chandra},
url = {https://arxiv.org/abs/2102.07954},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2102.07954},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Roberts, Nicholas Carl; Khodak, Mikhail; Dao, Tri; Li, Liam; Balcan, Nina; Re, Christopher; Talwalkar, Ameet
Searching for Convolutions and a More Ambitious NAS Miscellaneous
2021.
@misc{roberts2021searching,
title = {Searching for Convolutions and a More Ambitious NAS},
author = {Nicholas Carl Roberts and Mikhail Khodak and Tri Dao and Liam Li and Nina Balcan and Christopher Re and Ameet Talwalkar},
url = {https://openreview.net/forum?id=ascdLuNQY4J},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Ru, Binxin; Wan, Xingchen; Dong, Xiaowen; Osborne, Michael
Interpretable Neural Architecture Search via Bayesian Optimisation with Weisfeiler-Lehman Kernels Proceedings Article
In: International Conference on Learning Representations, 2021.
@inproceedings{ru2021interpretable,
title = {Interpretable Neural Architecture Search via Bayesian Optimisation with Weisfeiler-Lehman Kernels},
author = {Binxin Ru and Xingchen Wan and Xiaowen Dong and Michael Osborne},
url = {https://openreview.net/forum?id=j9Rv7qdXjd},
year = {2021},
date = {2021-01-01},
booktitle = {International Conference on Learning Representations},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhou, Yanqi; Dong, Xuanyi; Akin, Berkin; Tan, Mingxing; Peng, Daiyi; Meng, Tianjian; Yazdanbakhsh, Amir; Huang, Da; Narayanaswami, Ravi; Laudon, James
Rethinking Co-design of Neural Architectures and Hardware Accelerators Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2102-08619,
title = {Rethinking Co-design of Neural Architectures and Hardware Accelerators},
author = {Yanqi Zhou and Xuanyi Dong and Berkin Akin and Mingxing Tan and Daiyi Peng and Tianjian Meng and Amir Yazdanbakhsh and Da Huang and Ravi Narayanaswami and James Laudon},
url = {https://arxiv.org/abs/2102.08619},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2102.08619},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Hu, Liangxiao; Liu, Qinglin; Zhang, Jun; Jiang, Feng; Liu, Yang; Zhang, Shengping
A-DARTS: attention-guided differentiable architecture search for lung nodule classification Journal Article
In: Journal of Electronic Imaging, vol. 30, no. 1, pp. 1 – 11, 2021.
@article{10.1117/1.JEI.30.1.013012,
title = {A-DARTS: attention-guided differentiable architecture search for lung nodule classification},
author = {Liangxiao Hu and Qinglin Liu and Jun Zhang and Feng Jiang and Yang Liu and Shengping Zhang},
url = {https://doi.org/10.1117/1.JEI.30.1.013012},
doi = {10.1117/1.JEI.30.1.013012},
year = {2021},
date = {2021-01-01},
journal = {Journal of Electronic Imaging},
volume = {30},
number = {1},
pages = {1 -- 11},
publisher = {SPIE},
abstract = {Lung cancer has caused the most cancer deaths in the past several years. Benign–malignant lung nodule classification is vital in lung nodule detection, which can help early diagnosis of lung cancer. Most existing works extract the features of chest CT images using well-designed networks, which require substantial expert effort. To automate the manual process of network design, we propose an attention-guided differentiable architecture search (A-DARTS) method, which directly searches for the optimal network on chest CT images. In addition, A-DARTS utilizes an attention mechanism to alleviate the effect of the initialization-sensitive nature of the searched network while enhancing the feature representation ability. Extensive experiments on the Lung Image Database Consortium image collection (LIDC-IDRI) benchmark dataset show that the proposed method achieves a lung nodule classification accuracy of 92.93%, which is superior to the state-of-the-art methods.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Nayman, Niv; Aflalo, Yonathan; Noy, Asaf; Zelnik-Manor, Lihi
HardCoRe-NAS: Hard Constrained diffeRentiable Neural Architecture Search Technical Report
2021.
@techreport{nayman2021hardcore,
title = {HardCoRe-NAS: Hard Constrained diffeRentiable Neural Architecture Search},
author = {Niv Nayman and Yonathan Aflalo and Asaf Noy and Lihi Zelnik-Manor},
url = {https://arxiv.org/abs/2102.11646},
year = {2021},
date = {2021-01-01},
journal = {arXiv preprint arXiv:2102.11646},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Nguyen, Nam; Chang, Morris J
Contrastive Self-supervised Neural Architecture Search Miscellaneous
2021.
@misc{nguyen2021contrastive,
title = {Contrastive Self-supervised Neural Architecture Search},
author = {Nam Nguyen and Morris J Chang},
url = {https://arxiv.org/abs/2102.10557},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Guo, Yong; Zheng, Yin; Tan, Mingkui; Chen, Qi; Li, Zhipeng; Chen, Jian; Zhao, Peilin; Huang, Junzhou
Towards Accurate and Compact Architectures via Neural Architecture Transformer Technical Report
2021.
@techreport{guo2021accurate,
title = {Towards Accurate and Compact Architectures via Neural Architecture Transformer},
author = {Yong Guo and Yin Zheng and Mingkui Tan and Qi Chen and Zhipeng Li and Jian Chen and Peilin Zhao and Junzhou Huang},
url = {https://arxiv.org/abs/2102.10301},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wu, Junru; Dai, Xiyang; Chen, Dongdong; Chen, Yinpeng; Liu, Mengchen; Yu, Ye; Wang, Zhangyang; Liu, Zicheng; Chen, Mei; Yuan, Lu
Weak NAS Predictors Are All You Need Technical Report
2021.
@techreport{wu2021weak,
title = {Weak NAS Predictors Are All You Need},
author = {Junru Wu and Xiyang Dai and Dongdong Chen and Yinpeng Chen and Mengchen Liu and Ye Yu and Zhangyang Wang and Zicheng Liu and Mei Chen and Lu Yuan},
url = {https://arxiv.org/abs/2102.10490},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zhang, Zijun; Cofer, Evan M.; Troyanskaya, Olga G.
AMBIENT: Accelerated Convolutional Neural Network Architecture Search for Regulatory Genomics Proceedings Article
In: Machine Learning in Computational Biology (MLCB 2020), 2021.
@inproceedings{ZhangMLCB2020,
title = {AMBIENT: Accelerated Convolutional Neural Network Architecture Search for Regulatory Genomics},
author = {Zijun Zhang and Evan M. Cofer and Olga G. Troyanskaya},
url = {https://www.biorxiv.org/content/biorxiv/early/2021/02/27/2021.02.25.432960.full.pdf},
year = {2021},
date = {2021-01-01},
booktitle = {Machine Learning in Computational Biology (MLCB 2020)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Chen, J; Jiang, Y; Huang, Z; Guo, X; Wu, B; Sun, L; Wu, T
Fine-Grained Detection of Driver Distraction Based on Neural Architecture Search Journal Article
In: IEEE Transactions on Intelligent Transportation Systems, pp. 1-19, 2021.
@article{9352235,
title = {Fine-Grained Detection of Driver Distraction Based on Neural Architecture Search},
author = {J Chen and Y Jiang and Z Huang and X Guo and B Wu and L Sun and T Wu},
url = {https://ieeexplore.ieee.org/abstract/document/9352235},
doi = {10.1109/TITS.2021.3055545},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Intelligent Transportation Systems},
pages = {1-19},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhang, T; Lei, C; Zhang, Z; Meng, X-B; Chen, C L P
AS-NAS: Adaptive Scalable Neural Architecture Search with Reinforced Evolutionary Algorithm for Deep Learning Journal Article
In: IEEE Transactions on Evolutionary Computation, pp. 1-1, 2021.
@article{9360872,
title = {AS-NAS: Adaptive Scalable Neural Architecture Search with Reinforced Evolutionary Algorithm for Deep Learning},
author = {T Zhang and C Lei and Z Zhang and X-B Meng and C L P Chen},
url = {https://ieeexplore.ieee.org/abstract/document/9360872},
doi = {10.1109/TEVC.2021.3061466},
year = {2021},
date = {2021-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
pages = {1-1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Xie, Guoyang; Wang, Jinbao; Yu, Guo; Zheng, Feng; Jin, Yaochu
Tiny Adversarial Multi-Objective Oneshot Neural Architecture Search Technical Report
2021.
@techreport{xie2021tiny,
title = {Tiny Adversarial Multi-Objective Oneshot Neural Architecture Search},
author = {Guoyang Xie and Jinbao Wang and Guo Yu and Feng Zheng and Yaochu Jin},
url = {https://arxiv.org/abs/2103.00363},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Le, Cat P; Soltani, Mohammadreza; Ravier, Robert; Tarokh, Vahid
Neural Architecture Search From Task Similarity Measure Technical Report
2021.
@techreport{le2021neural,
title = {Neural Architecture Search From Task Similarity Measure},
author = {Cat P Le and Mohammadreza Soltani and Robert Ravier and Vahid Tarokh},
url = {https://arxiv.org/abs/2103.00241},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Guo, Yong; Chen, Yaofo; Zheng, Yin; Chen, Qi; Zhao, Peilin; Chen, Jian; Huang, Junzhou; Tan, Mingkui
Pareto-Frontier-aware Neural Architecture Generation for Diverse Budgets Technical Report
2021.
@techreport{guo2021paretofrontieraware,
title = {Pareto-Frontier-aware Neural Architecture Generation for Diverse Budgets},
author = {Yong Guo and Yaofo Chen and Yin Zheng and Qi Chen and Peilin Zhao and Jian Chen and Junzhou Huang and Mingkui Tan},
url = {https://arxiv.org/abs/2103.00219},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Jeong, Wonyong; Lee, Hayeon; Park, Gun; Hyung, Eunyoung; Baek, Jinheon; Hwang, Sung Ju
Task-Adaptive Neural Network Retrieval with Meta-Contrastive Learning Technical Report
2021.
@techreport{jeong2021taskadaptive,
title = {Task-Adaptive Neural Network Retrieval with Meta-Contrastive Learning},
author = {Wonyong Jeong and Hayeon Lee and Gun Park and Eunyoung Hyung and Jinheon Baek and Sung Ju Hwang},
url = {https://arxiv.org/abs/2103.01495},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zhang, Ziwei; Wang, Xin; Zhu, Wenwu
Automated Machine Learning on Graphs: A Survey Technical Report
2021.
@techreport{zhang2021automated,
title = {Automated Machine Learning on Graphs: A Survey},
author = {Ziwei Zhang and Xin Wang and Wenwu Zhu},
url = {https://arxiv.org/abs/2103.00742},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Liu, Luyan; Wen, Zhiwei; Liu, Songwei; Zhou, Hong-Yu; Zhu, Hongwei; Xie, Weicheng; Shen, Linlin; Ma, Kai; Zheng, Yefeng
MixSearch: Searching for Domain Generalized Medical Image Segmentation Architectures Technical Report
2021.
@techreport{liu2021mixsearch,
title = {MixSearch: Searching for Domain Generalized Medical Image Segmentation Architectures},
author = {Luyan Liu and Zhiwei Wen and Songwei Liu and Hong-Yu Zhou and Hongwei Zhu and Weicheng Xie and Linlin Shen and Kai Ma and Yefeng Zheng},
url = {https://arxiv.org/abs/2102.13280},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Guo, Qingbei; Wu, Xiao-Jun; Kittler, Josef; Feng, Zhiquan
Differentiable Neural Architecture Learning for Efficient Neural Network Design Technical Report
2021.
@techreport{guo2021differentiable,
title = {Differentiable Neural Architecture Learning for Efficient Neural Network Design},
author = {Qingbei Guo and Xiao-Jun Wu and Josef Kittler and Zhiquan Feng},
url = {https://arxiv.org/abs/2103.02126},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Javeri, Indrajeet Y; Toutiaee, Mohammadhossein; Arpinar, Ismailcem B; Miller, Tom W; Miller, John A
Improving Neural Networks for Time Series Forecasting using Data Augmentation and AutoML Technical Report
2021.
@techreport{javeri2021improving,
title = {Improving Neural Networks for Time Series Forecasting using Data Augmentation and AutoML},
author = {Indrajeet Y Javeri and Mohammadhossein Toutiaee and Ismailcem B Arpinar and Tom W Miller and John A Miller},
url = {https://arxiv.org/abs/2103.01992},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Chen, Yaofo; Guo, Yong; Chen, Qi; Li, Minli; Wang, Yaowei; Zeng, Wei; Tan, Mingkui
Contrastive Neural Architecture Search with Neural Architecture Comparators Proceedings Article
In: CVPR2021, 2021.
@inproceedings{DBLP:journals/corr/abs-2103-05471,
title = {Contrastive Neural Architecture Search with Neural Architecture Comparators},
author = {Yaofo Chen and Yong Guo and Qi Chen and Minli Li and Yaowei Wang and Wei Zeng and Mingkui Tan},
url = {https://arxiv.org/abs/2103.05471},
year = {2021},
date = {2021-01-01},
booktitle = {CVPR2021},
journal = {CoRR},
volume = {abs/2103.05471},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhang, Kaiyu; Chen, Jinglong; He, Shuilong; Xu, Enyong; Li, Fudong; Zhou, Zitong
Differentiable neural architecture search augmented with pruning and multi-objective optimization for time-efficient intelligent fault diagnosis of machinery Journal Article
In: Mechanical Systems and Signal Processing, vol. 158, pp. 107773, 2021, ISSN: 0888-3270.
@article{ZHANG2021107773,
title = {Differentiable neural architecture search augmented with pruning and multi-objective optimization for time-efficient intelligent fault diagnosis of machinery},
author = {Kaiyu Zhang and Jinglong Chen and Shuilong He and Enyong Xu and Fudong Li and Zitong Zhou},
url = {https://www.sciencedirect.com/science/article/pii/S0888327021001680},
doi = {10.1016/j.ymssp.2021.107773},
issn = {0888-3270},
year = {2021},
date = {2021-01-01},
journal = {Mechanical Systems and Signal Processing},
volume = {158},
pages = {107773},
abstract = {Intelligent fault diagnosis, which is mainly based on neural networks, has been widely used in machinery monitoring. Although such deep learning methods are effective, new architectures are mainly handcrafted through a series of experiments that require ample time and substantial effort. To automate the process of building neural networks and save design time, a novel differentiable neural architecture search method is proposed. By gradually reducing candidate operations while retaining trained parameters during pruning, the computation consumed by each stage of neural architecture search is decreased, which accelerates the search process. To improve the inferential efficiency of subnetworks, specially designed penalty terms are introduced into the objective function for searching optimal numbers of layers and nodes, which can reduce the complexity of subnetworks and save calculation time in signal analysis. In addition, exclusive competition between candidate operations is broken by changing the discretization and selection methods of operations, which provides a basis for channel fusion. The effectiveness of the proposed method is verified on two datasets. Experiments show that this method can generate subnetworks of lower complexity and less computational cost than other state-of-the-art neural architecture search techniques, while achieving competitive results.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
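The pruning mechanism this abstract describes, gradually dropping candidate operations while the survivors keep their trained weights, can be illustrated with a DARTS-style mixed edge. The sketch below is our reading of that idea, not the paper's code; the operation set and pruning schedule are placeholders.

import torch
import torch.nn as nn

class MixedEdge(nn.Module):
    # A softmax-weighted mixture of candidate operations (DARTS-style).
    def __init__(self, ops):
        super().__init__()
        self.ops = nn.ModuleList(ops)
        self.alpha = nn.Parameter(torch.zeros(len(ops)))  # architecture weights

    def forward(self, x):
        w = torch.softmax(self.alpha, dim=0)
        return sum(wi * op(x) for wi, op in zip(w, self.ops))

    def prune_weakest(self):
        # Drop the lowest-weighted candidate; survivors retain trained params.
        if len(self.ops) > 1:
            k = int(torch.argmin(self.alpha))
            del self.ops[k]
            self.alpha = nn.Parameter(
                torch.cat([self.alpha.data[:k], self.alpha.data[k + 1:]]))

edge = MixedEdge([nn.Conv2d(8, 8, 3, padding=1),
                  nn.Conv2d(8, 8, 5, padding=2),
                  nn.Identity()])
out = edge(torch.randn(1, 8, 16, 16))   # search-phase forward pass
edge.prune_weakest()                    # one pruning step of the search

Because the surviving operations are never re-initialized, each pruning step shrinks the search cost without discarding what has already been learned, which is the efficiency argument the abstract makes.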
Liang, Tingting; Wang, Yongtao; Tang, Zhi; Hu, Guosheng; Ling, Haibin
OPANAS: One-Shot Path Aggregation Network Architecture Search for Object Detection Technical Report
2021.
@techreport{liang2021opanas,
title = {OPANAS: One-Shot Path Aggregation Network Architecture Search for Object Detection},
author = {Tingting Liang and Yongtao Wang and Zhi Tang and Guosheng Hu and Haibin Ling},
url = {https://arxiv.org/abs/2103.04507},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Cao, Shengcao; Wang, Xiaofang; Kitani, Kris
Efficient Model Performance Estimation via Feature Histories Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-04450,
title = {Efficient Model Performance Estimation via Feature Histories},
author = {Shengcao Cao and Xiaofang Wang and Kris Kitani},
url = {https://arxiv.org/abs/2103.04450},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.04450},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zhang, Shi-Xin; Hsieh, Chang-Yu; Zhang, Shengyu; Yao, Hong
Neural Predictor based Quantum Architecture Search Technical Report
2021.
@techreport{zhang2021neural,
title = {Neural Predictor based Quantum Architecture Search},
author = {Shi-Xin Zhang and Chang-Yu Hsieh and Shengyu Zhang and Hong Yao},
url = {https://arxiv.org/abs/2103.06524},
year = {2021},
date = {2021-01-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Liu, Xiaobo; Zhang, Chaochao; Cai, Zhihua; Yang, Jianfeng; Zhou, Zhilang; Gong, Xin
Continuous Particle Swarm Optimization-Based Deep Learning Architecture Search for Hyperspectral Image Classification Journal Article
In: Remote Sensing, vol. 13, no. 6, 2021, ISSN: 2072-4292.
@article{rs13061082,
title = {Continuous Particle Swarm Optimization-Based Deep Learning Architecture Search for Hyperspectral Image Classification},
author = {Xiaobo Liu and Chaochao Zhang and Zhihua Cai and Jianfeng Yang and Zhilang Zhou and Xin Gong},
url = {https://www.mdpi.com/2072-4292/13/6/1082},
doi = {10.3390/rs13061082},
issn = {2072-4292},
year = {2021},
date = {2021-01-01},
journal = {Remote Sensing},
volume = {13},
number = {6},
abstract = {Deep convolutional neural networks (CNNs) are widely used in hyperspectral image (HSI) classification. However, the most successful CNN architectures are handcrafted, which need professional knowledge and consume a very significant amount of time. To automatically design cell-based CNN architectures for HSI classification, we propose an efficient continuous evolutionary method, named CPSO-Net, which can dramatically accelerate optimal architecture generation by the optimization of weight-sharing parameters. First, a SuperNet with all candidate operations is maintained to share the parameters for all individuals and optimized by collecting the gradients of all individuals in the population. Second, a novel direct encoding strategy is devised to encode architectures into particles, which inherit the parameters from the SuperNet. Then, particle swarm optimization is used to search for the optimal deep architecture from the particle swarm. Furthermore, experiments with limited training samples based on four widely used biased and unbiased hyperspectral datasets showed that our proposed method achieves good performance comparable to the state-of-the-art HSI classification methods.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
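For readers unfamiliar with continuous particle swarm optimization over architecture encodings, the toy sketch below shows the particle update at the heart of such a search. The fitness function is a synthetic stand-in for evaluating a particle's architecture with weights inherited from the SuperNet, which the sketch does not model.

import numpy as np

rng = np.random.default_rng(0)

def fitness(encoding):                   # placeholder for SuperNet-based eval
    return -np.sum((encoding - 0.3) ** 2)

n_particles, dim = 12, 10
pos = rng.uniform(0, 1, (n_particles, dim))   # continuous cell encodings
vel = np.zeros((n_particles, dim))
pbest = pos.copy()
pbest_f = np.array([fitness(p) for p in pos])
gbest = pbest[np.argmax(pbest_f)]

for step in range(50):
    r1, r2 = rng.uniform(size=(2, n_particles, dim))
    # Standard PSO update: inertia + cognitive + social terms.
    vel = 0.7 * vel + 1.5 * r1 * (pbest - pos) + 1.5 * r2 * (gbest - pos)
    pos = np.clip(pos + vel, 0, 1)
    f = np.array([fitness(p) for p in pos])
    improved = f > pbest_f
    pbest[improved], pbest_f[improved] = pos[improved], f[improved]
    gbest = pbest[np.argmax(pbest_f)]

print("best encoding:", np.round(gbest, 2))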
Ban, Hao; Xie, Pengtao
Interleaving Learning, with Application to Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-07018,
title = {Interleaving Learning, with Application to Neural Architecture Search},
author = {Hao Ban and Pengtao Xie},
url = {https://arxiv.org/abs/2103.07018},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.07018},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Choi, Kwanghee; Choe, Minyoung; Lee, Hyelee
Pretraining Neural Architecture Search Controllers with Locality-based Self-Supervised Learning Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-08157,
title = {Pretraining Neural Architecture Search Controllers with Locality-based Self-Supervised Learning},
author = {Kwanghee Choi and Minyoung Choe and Hyelee Lee},
url = {https://arxiv.org/abs/2103.08157},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.08157},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Gracheva, Ekaterina
Trainless Model Performance Estimation for Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-08312,
title = {Trainless Model Performance Estimation for Neural Architecture Search},
author = {Ekaterina Gracheva},
url = {https://arxiv.org/abs/2103.08312},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.08312},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wu, Xuan; Zhang, Xiuyi; Jia, Linhan; Chen, Liang; Liang, Yanchun; Zhou, You; Wu, Chunguo
Neural Architecture Search based on Cartesian Genetic Programming Coding Method Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-07173,
title = {Neural Architecture Search based on Cartesian Genetic Programming Coding Method},
author = {Xuan Wu and Xiuyi Zhang and Linhan Jia and Liang Chen and Yanchun Liang and You Zhou and Chunguo Wu},
url = {https://arxiv.org/abs/2103.07173},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.07173},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Luo, Xiangzhong; Liu, Di; Huai, Shuo; Liu, Weichen
HSCoNAS: Hardware-Software Co-Design of Efficient DNNs via Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-08325,
title = {HSCoNAS: Hardware-Software Co-Design of Efficient DNNs via Neural Architecture Search},
author = {Xiangzhong Luo and Di Liu and Shuo Huai and Weichen Liu},
url = {https://arxiv.org/abs/2103.08325},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.08325},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wang, Ruochen; Cheng, Minhao; Chen, Xiangning; Tang, Xiaocheng; Hsieh, Cho-Jui
Rethinking Architecture Selection in Differentiable NAS Proceedings Article
In: International Conference on Learning Representations, 2021.
@inproceedings{wang2021rethinking,
title = {Rethinking Architecture Selection in Differentiable NAS},
author = {Ruochen Wang and Minhao Cheng and Xiangning Chen and Xiaocheng Tang and Cho-Jui Hsieh},
url = {https://openreview.net/forum?id=PKubaeJkw3},
year = {2021},
date = {2021-01-01},
booktitle = {International Conference on Learning Representations},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Huang, Sian-Yao; Chen, Wei-Chao
Searching by Generating: Flexible and Efficient One-Shot NAS with Architecture Generator Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-07289,
title = {Searching by Generating: Flexible and Efficient One-Shot NAS with Architecture Generator},
author = {Sian-Yao Huang and Wei-Chao Chen},
url = {https://arxiv.org/abs/2103.07289},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.07289},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Lakhmiri, Dounia; Le Digabel, Sébastien
Use of static surrogates in hyperparameter optimization Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-07963,
title = {Use of static surrogates in hyperparameter optimization},
author = {Dounia Lakhmiri and Sébastien Le Digabel},
url = {https://arxiv.org/abs/2103.07963},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.07963},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Inkawhich, Matthew; Inkawhich, Nathan; Davis, Eric; Li, Hai; Chen, Yiran
The Untapped Potential of Off-the-Shelf Convolutional Neural Networks Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-09891,
title = {The Untapped Potential of Off-the-Shelf Convolutional Neural Networks},
author = {Matthew Inkawhich and Nathan Inkawhich and Eric Davis and Hai Li and Yiran Chen},
url = {https://arxiv.org/abs/2103.09891},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.09891},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Chen, Zhiqiang; Xu, Ting-Bing; Liao, Weijian; Li, Zhengcheng; Li, Jinpeng; Liu, Cheng-Lin; He, Huiguang
SNAP: Shaping neural architectures progressively via information density criterion Journal Article
In: Pattern Recognition, vol. 116, pp. 107923, 2021, ISSN: 0031-3203.
@article{CHEN2021107923,
title = {SNAP: Shaping neural architectures progressively via information density criterion},
author = {Zhiqiang Chen and Ting-Bing Xu and Weijian Liao and Zhengcheng Li and Jinpeng Li and Cheng-Lin Liu and Huiguang He},
url = {https://www.sciencedirect.com/science/article/pii/S0031320321001102},
doi = {10.1016/j.patcog.2021.107923},
issn = {0031-3203},
year = {2021},
date = {2021-01-01},
journal = {Pattern Recognition},
volume = {116},
pages = {107923},
abstract = {An excellent neural network architecture is built for a specific target task and device. As the target task or device changes, the neural architecture we need changes, too. Rather than redesigning or searching for a brand new one, automatically adjusting the existing architecture is an alternative yet efficient way. To this end, we propose a method to Shape the existing Neural Architectures Progressively (SNAP) to better adapt them to the target task and device. Inspired by the streamline of a water drop shaped by air resistance, we define an information density criterion (playing the role of resistance) to drive the network architecture to reduce the size of the part with the lowest information density. Iteratively, a more adaptive architecture is obtained progressively in a greedy way. Theoretically, we prove that the greedy strategy is reasonable and can shape a better architecture. Because of the small adjustment of the architecture each time, the new architecture can inherit the parameters of the old architecture, avoiding retraining from scratch. The proposed method is therefore very efficient and does not require high computation cost. Experimental results show that the proposed method can effectively improve a given network by adjusting its architecture, and it can generate different architectures for different tasks and devices to adapt to them well. Compared with search-based auto-generated neural architectures, our approach achieves comparable or even better performance without tremendous computation resources.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
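The greedy shaping loop in the SNAP abstract admits a compact illustration. In the sketch below (ours; the density measure is a crude placeholder, not the paper's information density criterion) the layer with the lowest score is shrunk slightly each iteration, and surviving filters keep their weights so nothing is retrained from scratch.

import numpy as np

rng = np.random.default_rng(0)
widths = [64, 64, 128, 128]                  # filters per conv layer
weights = [rng.normal(size=(w, 3, 3)) for w in widths]

def information_density(layer):
    # Placeholder proxy: mean absolute weight magnitude.
    return float(np.mean(np.abs(layer)))

for step in range(5):
    scores = [information_density(w) for w in weights]
    k = int(np.argmin(scores))               # part with the lowest density
    keep = max(8, widths[k] - 8)             # shrink by 8 filters, floor at 8
    order = np.argsort([np.abs(f).sum() for f in weights[k]])[::-1]
    weights[k] = weights[k][order[:keep]]    # survivors inherit parameters
    widths[k] = keep

print("shaped widths:", widths)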
Li, Chaojian; Yu, Zhongzhi; Fu, Yonggan; Zhang, Yongan; Zhao, Yang; You, Haoran; Yu, Qixuan; Wang, Yue; Lin, Yingyan
HW-NAS-Bench: Hardware-Aware Neural Architecture Search Benchmark Proceedings Article
In: ICLR 2021, 2021.
@inproceedings{DBLP:journals/corr/abs-2103-10584,
title = {HW-NAS-Bench: Hardware-Aware Neural Architecture Search Benchmark},
author = {Chaojian Li and Zhongzhi Yu and Yonggan Fu and Yongan Zhang and Yang Zhao and Haoran You and Qixuan Yu and Yue Wang and Yingyan Lin},
url = {https://arxiv.org/abs/2103.10584},
year = {2021},
date = {2021-01-01},
booktitle = {ICLR 2021},
journal = {CoRR},
volume = {abs/2103.10584},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Pyeon, Myeongjang; Moon, Jihwan; Hahn, Taeyoung; Kim, Gunhee
SEDONA: Search for Decoupled Neural Networks toward Greedy Block-wise Learning Proceedings Article
In: International Conference on Learning Representations, 2021.
@inproceedings{pyeon2021sedona,
title = {SEDONA: Search for Decoupled Neural Networks toward Greedy Block-wise Learning},
author = {Myeongjang Pyeon and Jihwan Moon and Taeyoung Hahn and Gunhee Kim},
url = {https://openreview.net/forum?id=XLfdzwNKzch},
year = {2021},
date = {2021-01-01},
booktitle = {International Conference on Learning Representations},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhou, Allan; Knowles, Tom; Finn, Chelsea
Meta-learning Symmetries by Reparameterization Proceedings Article
In: International Conference on Learning Representations, 2021.
@inproceedings{zhou2021metalearning,
title = {Meta-learning Symmetries by Reparameterization},
author = {Allan Zhou and Tom Knowles and Chelsea Finn},
url = {https://openreview.net/forum?id=-QxT4mJdijq},
year = {2021},
date = {2021-01-01},
booktitle = {International Conference on Learning Representations},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Mehrotra, Abhinav; Ramos, Alberto Gil C P; Bhattacharya, Sourav; Dudziak, Łukasz; Vipperla, Ravichander; Chau, Thomas; Abdelfattah, Mohamed S; Ishtiaq, Samin; Lane, Nicholas Donald
NAS-Bench-ASR: Reproducible Neural Architecture Search for Speech Recognition Proceedings Article
In: International Conference on Learning Representations, 2021.
@inproceedings{mehrotra2021nasbenchasr,
title = {NAS-Bench-ASR: Reproducible Neural Architecture Search for Speech Recognition},
author = {Abhinav Mehrotra and Alberto Gil C P Ramos and Sourav Bhattacharya and Łukasz Dudziak and Ravichander Vipperla and Thomas Chau and Mohamed S Abdelfattah and Samin Ishtiaq and Nicholas Donald Lane},
url = {https://openreview.net/forum?id=CU0APx9LMaL},
year = {2021},
date = {2021-01-01},
booktitle = {International Conference on Learning Representations},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhou, Daquan; Jin, Xiaojie; Lian, Xiaochen; Yang, Linjie; Xue, Yujing; Hou, Qibin; Feng, Jiashi
AutoSpace: Neural Architecture Search with Less Human Interference Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-11833,
title = {AutoSpace: Neural Architecture Search with Less Human Interference},
author = {Daquan Zhou and Xiaojie Jin and Xiaochen Lian and Linjie Yang and Yujing Xue and Qibin Hou and Jiashi Feng},
url = {https://arxiv.org/abs/2103.11833},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.11833},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Li, Changlin; Tang, Tao; Wang, Guangrun; Peng, Jiefeng; Wang, Bing; Liang, Xiaodan; Chang, Xiaojun
BossNAS: Exploring Hybrid CNN-transformers with Block-wisely Self-supervised Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-12424,
title = {BossNAS: Exploring Hybrid CNN-transformers with Block-wisely Self-supervised Neural Architecture Search},
author = {Changlin Li and Tao Tang and Guangrun Wang and Jiefeng Peng and Bing Wang and Xiaodan Liang and Xiaojun Chang},
url = {https://arxiv.org/abs/2103.12424},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.12424},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Abed, H; Gyires-Tóth, B
Efficient Neural Architecture Search for Long Short-Term Memory Networks Proceedings Article
In: 2021 IEEE 19th World Symposium on Applied Machine Intelligence and Informatics (SAMI), pp. 000287-000292, 2021.
@inproceedings{9378612,
title = {Efficient Neural Architecture Search for Long Short-Term Memory Networks},
author = {H Abed and B Gyires-Tóth},
url = {https://ieeexplore.ieee.org/abstract/document/9378612},
doi = {10.1109/SAMI50585.2021.9378612},
year = {2021},
date = {2021-01-01},
booktitle = {2021 IEEE 19th World Symposium on Applied Machine Intelligence and Informatics (SAMI)},
pages = {000287-000292},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Ai, Dige; Zhang, Hong
GNAS: A Generalized Neural Network Architecture Search Framework Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2103-11820,
title = {GNAS: A Generalized Neural Network Architecture Search Framework},
author = {Dige Ai and Hong Zhang},
url = {https://arxiv.org/abs/2103.11820},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2103.11820},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}