Maintained by Difan Deng and Marius Lindauer.
The following list considers papers related to neural architecture search. It is by no means complete. If a paper is missing from the list, please let us know.
Please note that although NAS methods steadily improve, the quality of empirical evaluations in this field is still lagging behind other areas in machine learning, AI, and optimization. We would therefore like to share some best practices for empirical evaluations of NAS methods, which we believe will facilitate sustained and measurable progress in the field. If you are interested in a teaser, please read our blog post or jump directly to our checklist.
Transformers have gained increasing popularity in different domains. For a comprehensive list of papers focusing on neural architecture search for Transformer-based search spaces, the awesome-transformer-search repo is all you need.
2022
Shen, Hao; Zhao, Zhong-Qiu; Liao, Wenrui; Tian, Weidong; Huang, De-Shuang
Joint Operation and Attention Block Search for Lightweight Image Restoration Journal Article
In: Pattern Recognition, pp. 108909, 2022, ISSN: 0031-3203.
@article{SHEN2022108909,
title = {Joint Operation and Attention Block Search for Lightweight Image Restoration},
author = {Hao Shen and Zhong-Qiu Zhao and Wenrui Liao and Weidong Tian and De-Shuang Huang},
url = {https://www.sciencedirect.com/science/article/pii/S0031320322003909},
doi = {10.1016/j.patcog.2022.108909},
issn = {0031-3203},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Pattern Recognition},
pages = {108909},
abstract = {Recently, block-based design methods have shown effectiveness in image restoration tasks, which are usually designed in a handcrafted manner and have computation and memory consumption challenges in practice. In this paper, we propose a joint operation and attention block search algorithm for image restoration, which focuses on searching for optimal combinations of operation blocks and attention blocks. Specifically, we first construct two search spaces: operation block search space and attention block search space. The former is used to explore the suitable operation of each layer and aims to construct a lightweight and effective operation search module (OSM). The latter is applied to discover the optimal connection of various attention mechanisms and aims to enhance the feature expression. The searched structure is called the attention search module (ASM). Then we combine OSM and ASM to construct a joint search module (JSM), which serves as the basic module to build the final network. Moreover, we propose a cross-scale fusion module (CSFM) to effectively integrate multiple hierarchical features from JSMs, which helps to mine feature corrections of intermediate layers. Extensive experiments on image super-resolution, gray image denoising, and JPEG image deblocking tasks demonstrate that our proposed network can achieve competitive performance. The source code is available on https://github.com/it-hao/JSNet.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhou, Zixuan; Ning, Xuefei; Cai, Yi; Han, Jiashu; Deng, Yiping; Dong, Yuhan; Yang, Huazhong; Wang, Yu
CLOSE: Curriculum Learning On the Sharing Extent Towards Better One-shot NAS Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2207-07868,
title = {CLOSE: Curriculum Learning On the Sharing Extent Towards Better One-shot NAS},
author = {Zixuan Zhou and Xuefei Ning and Yi Cai and Jiashu Han and Yiping Deng and Yuhan Dong and Huazhong Yang and Yu Wang},
url = {https://doi.org/10.48550/arXiv.2207.07868},
doi = {10.48550/arXiv.2207.07868},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2207.07868},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Hafner, Frank M.; Zeller, Matthias; Schutera, Mark; Abhau, Jochen; Kooij, Julian F. P.
BackboneAnalysis: Structured Insights into Compute Platforms from CNN Inference Latency Proceedings Article
In: 2022 IEEE Intelligent Vehicles Symposium (IV), pp. 1801-1809, 2022.
@inproceedings{9827260,
title = {BackboneAnalysis: Structured Insights into Compute Platforms from CNN Inference Latency},
author = {Frank M. Hafner and Matthias Zeller and Mark Schutera and Jochen Abhau and Julian F. P. Kooij},
url = {https://ieeexplore.ieee.org/abstract/document/9827260},
doi = {10.1109/IV51971.2022.9827260},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE Intelligent Vehicles Symposium (IV)},
pages = {1801-1809},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Deng, Shuchao; Sun, Yanan; Galvan, Edgar
Neural Architecture Search Using Genetic Algorithm for Facial Expression Recognition Proceedings Article
In: Proceedings of the Genetic and Evolutionary Computation Conference Companion, pp. 423–426, Association for Computing Machinery, Boston, Massachusetts, 2022, ISBN: 9781450392686.
@inproceedings{10.1145/3520304.3528884,
title = {Neural Architecture Search Using Genetic Algorithm for Facial Expression Recognition},
author = {Shuchao Deng and Yanan Sun and Edgar Galvan},
url = {https://doi.org/10.1145/3520304.3528884},
doi = {10.1145/3520304.3528884},
isbn = {9781450392686},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the Genetic and Evolutionary Computation Conference Companion},
pages = {423–426},
publisher = {Association for Computing Machinery},
address = {Boston, Massachusetts},
series = {GECCO '22},
abstract = {Facial expression is one of the most powerful, natural, and universal signals for human beings to express emotional states and intentions. Thus, it is evident the importance of correct and innovative facial expression recognition (FER) approaches in Artificial Intelligence. The current common practice for FER is to correctly design convolutional neural networks' architectures (CNNs) using human expertise. However, finding a well-performing architecture is often a very tedious and error-prone process for deep learning researchers. Neural architecture search (NAS) is an area of growing interest as demonstrated by the large number of scientific works published in recent years thanks to the impressive results achieved in recent years. We propose a genetic algorithm approach that uses an ingenious encoding-decoding mechanism that allows to automatically evolve CNNs on FER tasks attaining high accuracy classification rates. The experimental results demonstrate that the proposed algorithm achieves the best-known results on the CK+ and FERG datasets as well as competitive results on the JAFFE dataset.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wang, Ye-Qun; Chen, Chun-Hua; Zhang, Jun; Zhan, Zhi-Hui
Dropout Topology-Assisted Bidirectional Learning Particle Swarm Optimization for Neural Architecture Search Proceedings Article
In: Proceedings of the Genetic and Evolutionary Computation Conference Companion, pp. 93–96, Association for Computing Machinery, Boston, Massachusetts, 2022, ISBN: 9781450392686.
@inproceedings{10.1145/3520304.3528919,
title = {Dropout Topology-Assisted Bidirectional Learning Particle Swarm Optimization for Neural Architecture Search},
author = {Ye-Qun Wang and Chun-Hua Chen and Jun Zhang and Zhi-Hui Zhan},
url = {https://doi.org/10.1145/3520304.3528919},
doi = {10.1145/3520304.3528919},
isbn = {9781450392686},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the Genetic and Evolutionary Computation Conference Companion},
pages = {93–96},
publisher = {Association for Computing Machinery},
address = {Boston, Massachusetts},
series = {GECCO '22},
abstract = {The neural architecture search (NAS) is a new high-complexity optimization problem emerging in recent years. Solving NAS is challenging for optimization algorithms due to the following two issues. Firstly, besides the network architectures, there are also network hyperparameters that need to be optimized, which causes the search space of NAS to be complex and large and poses a great challenge to the optimization ability of the optimization algorithm. Secondly, NAS is an expensive optimization problem with expensive computational time to evaluate candidates, which poses a great challenge to the search speed and convergence of the optimization algorithm. Therefore, this paper proposes a novel dropout topology-assisted bidirectional learning particle swarm optimization (DBLPSO) algorithm for NAS to tackle these two issues. Firstly, inspired by the dropout technique in deep learning, a sorting-assisted dropout-based neighbor topology is proposed to enhance the population diversity and the optimization ability of PSO. Secondly, a bidirectional learning strategy is proposed to improve search speed and accelerate PSO convergence. In the experiments, the performance of the DBLPSO algorithm is evaluated on 10 tabular benchmarks based on NAS-bench 201, NATS-bench, and HPO-bench, by comparing with four NAS algorithms that have achieved state-of-the-art results on the NAS tabular benchmarks. The experimental results show that DBLPSO can obtain great performance for NAS and is superior to those NAS algorithms in comparison.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
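DBLPSO builds on the canonical global-best particle swarm update before adding its dropout topology and bidirectional learning. As a point of reference, here is a minimal Python sketch of that baseline update only; the objective function is a hypothetical stand-in for decoding a particle into an architecture and evaluating it, and neither of the paper's two proposed strategies is reproduced.
import numpy as np

rng = np.random.default_rng(0)

def objective(x):
    # Hypothetical stand-in for "decode x into an architecture and return
    # its validation error"; here just a sphere function to keep it runnable.
    return float(np.sum(x ** 2))

dim, n_particles, iters = 8, 20, 100
w, c1, c2 = 0.7, 1.5, 1.5  # inertia and acceleration coefficients

x = rng.uniform(-1.0, 1.0, (n_particles, dim))  # particle positions
v = np.zeros_like(x)                            # particle velocities
pbest = x.copy()                                # personal bests
pbest_f = np.array([objective(p) for p in x])
gbest = pbest[pbest_f.argmin()].copy()          # global best

for _ in range(iters):
    r1, r2 = rng.random(x.shape), rng.random(x.shape)
    v = w * v + c1 * r1 * (pbest - x) + c2 * r2 * (gbest - x)
    x = x + v
    f = np.array([objective(p) for p in x])
    better = f < pbest_f
    pbest[better], pbest_f[better] = x[better], f[better]
    gbest = pbest[pbest_f.argmin()].copy()

print("best encoding:", np.round(gbest, 3), "score:", pbest_f.min())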
Hadjadj, Lies; Deschamps, Alexis; Rauch, Edgar; Amini, Massih-Reza; Veron, Muriel; Louhichi, Sana
Neural Architecture Search for Transmission Electron Microscopy: Rapid Automation of Phase and Orientation Determination in TEM images Journal Article
In: Microscopy and Microanalysis, vol. 28, no. S1, pp. 3166–3169, 2022.
@article{hadjadj_deschamps_rauch_amini_veron_louhichi_2022,
title = {Neural Architecture Search for Transmission Electron Microscopy: Rapid Automation of Phase and Orientation Determination in TEM images},
author = {Lies Hadjadj and Alexis Deschamps and Edgar Rauch and Massih-Reza Amini and Muriel Veron and Sana Louhichi},
doi = {10.1017/S1431927622011758},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Microscopy and Microanalysis},
volume = {28},
number = {S1},
pages = {3166–3169},
publisher = {Cambridge University Press},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Milesi, Alexandre; Futrega, Michal; Marcinkiewicz, Michal; Ribalta, Pablo
Brain Tumor Segmentation Using Neural Network Topology Search Proceedings Article
In: Crimi, Alessandro; Bakas, Spyridon (Ed.): Brainlesion: Glioma, Multiple Sclerosis, Stroke and Traumatic Brain Injuries, pp. 366–376, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-08999-2.
@inproceedings{10.1007/978-3-031-08999-2_31,
title = {Brain Tumor Segmentation Using Neural Network Topology Search},
author = {Alexandre Milesi and Michal Futrega and Michal Marcinkiewicz and Pablo Ribalta},
editor = {Alessandro Crimi and Spyridon Bakas},
url = {https://link.springer.com/chapter/10.1007/978-3-031-08999-2_31},
isbn = {978-3-031-08999-2},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Brainlesion: Glioma, Multiple Sclerosis, Stroke and Traumatic Brain Injuries},
pages = {366--376},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {We apply a method from Automated Machine Learning (AutoML), namely Neural Architecture Search (NAS), to the task of brain tumor segmentation in MRIs for the BraTS 2021 challenge. NAS methods are known to be compute-intensive, so we use a continuous and differentiable search space in order to apply a DiNTS search for optimal fully convolutional architectures. Our method obtained Dice scores of 0.9161, 0.8707 and 0.8537 for whole tumor, tumor core and enhancing tumor regions respectively on the test dataset, while requiring no manual design of the network architecture, which was found automatically from the provided training data.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wang, Qiang; Shi, Shaohuai; Zhao, Kaiyong; Chu, Xiaowen
EASNet: Searching Elastic and Accurate Network Architecture for Stereo Matching Proceedings Article
In: European Conference on Computer Vision, 2022.
@inproceedings{wang2022easnet,
title = {EASNet: Searching Elastic and Accurate Network Architecture for Stereo Matching},
author = {Qiang Wang and Shaohuai Shi and Kaiyong Zhao and Xiaowen Chu},
url = {https://arxiv.org/pdf/xxxx.xxxxx.pdf},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {European Conference on Computer Vision},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Noda, Yuhei; Saito, Shota; Shirakawa, Shinichi
Efficient Search of Multiple Neural Architectures with Different Complexities via Importance Sampling Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2207-10334,
title = {Efficient Search of Multiple Neural Architectures with Different Complexities via Importance Sampling},
author = {Yuhei Noda and Shota Saito and Shinichi Shirakawa},
url = {https://doi.org/10.48550/arXiv.2207.10334},
doi = {10.48550/arXiv.2207.10334},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2207.10334},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wu, Yushu; Gong, Yifan; Zhao, Pu; Li, Yanyu; Zhan, Zheng; Niu, Wei; Tang, Hao; Qin, Minghai; Ren, Bin; Wang, Yanzhi
Compiler-Aware Neural Architecture Search for On-Mobile Real-time Super-Resolution Technical Report
2022.
@techreport{wu2022compiler,
title = {Compiler-Aware Neural Architecture Search for On-Mobile Real-time Super-Resolution},
author = {Yushu Wu and Yifan Gong and Pu Zhao and Yanyu Li and Zheng Zhan and Wei Niu and Hao Tang and Minghai Qin and Bin Ren and Yanzhi Wang},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {arXiv preprint arXiv:2207.12577},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Azarmehr, Neda; Shephard, Adam; Mahmood, Hanya; Rajpoot, Nasir; Khurram, Syed Ali
A Neural Architecture Search Based Framework for Segmentation of Epithelium, Nuclei and Oral Epithelial Dysplasia Grading Proceedings Article
In: Yang, Guang; Aviles-Rivero, Angelica; Roberts, Michael; Schönlieb, Carola-Bibiane (Ed.): Medical Image Understanding and Analysis, pp. 357–370, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-12053-4.
@inproceedings{10.1007/978-3-031-12053-4_27,
title = {A Neural Architecture Search Based Framework for Segmentation of Epithelium, Nuclei and Oral Epithelial Dysplasia Grading},
author = {Neda Azarmehr and Adam Shephard and Hanya Mahmood and Nasir Rajpoot and Syed Ali Khurram},
editor = {Guang Yang and Angelica Aviles-Rivero and Michael Roberts and Carola-Bibiane Schönlieb},
url = {https://link.springer.com/chapter/10.1007/978-3-031-12053-4_27},
isbn = {978-3-031-12053-4},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Medical Image Understanding and Analysis},
pages = {357--370},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {Oral epithelial dysplasia (OED) is a pre-cancerous histopathological diagnosis given to a range of oral lesions. Architectural, cytological and histological features of OED can be modelled through the segmentation of full epithelium, individual nuclei and stroma (connective tissues) to provide significant diagnostic features. In this paper, we explore a customised neural architecture search (NAS) based method for optimisation of an efficient architecture for segmentation of the full epithelium and individual nuclei in pathology whole slide images (WSIs). Our initial experimental results show that the NAS-derived architecture achieves 93.5% F1-score for the full epithelium segmentation and 94.5% for nuclear segmentation outperforming other state-of-the-art models. Accurate nuclear segmentation allows us to perform quantitative statistical and morphometric feature analyses of the segmented nuclei within regions of interest (ROIs) of multi-gigapixel whole-slide images (WSIs). We show that a random forest model using these features can differentiate between low-risk and high-risk OED lesions.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Jing, Kun; Xu, Jungang; Li, Pengfei
Graph Masked Autoencoder Enhanced Predictor for Neural Architecture Search Proceedings Article
In: Raedt, Luc De (Ed.): Proceedings of the Thirty-First International Joint Conference on Artificial Intelligence, IJCAI 2022, Vienna, Austria, 23-29 July 2022, pp. 3114–3120, ijcai.org, 2022.
@inproceedings{DBLP:conf/ijcai/JingXL22,
title = {Graph Masked Autoencoder Enhanced Predictor for Neural Architecture Search},
author = {Kun Jing and Jungang Xu and Pengfei Li},
editor = {Luc De Raedt},
url = {https://doi.org/10.24963/ijcai.2022/432},
doi = {10.24963/ijcai.2022/432},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the Thirty-First International Joint Conference on Artificial Intelligence, IJCAI 2022, Vienna, Austria, 23-29 July 2022},
pages = {3114--3120},
publisher = {ijcai.org},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Duggal, Rahul; Zhou, Hao; Yang, Shuo; Fang, Jun; Xiong, Yuanjun; Xia, Wei
Towards regression-free neural networks for diverse compute platforms Proceedings Article
In: ECCV 2022, 2022.
@inproceedings{Duggal2022,
title = {Towards regression-free neural networks for diverse compute platforms},
author = {Rahul Duggal and Hao Zhou and Shuo Yang and Jun Fang and Yuanjun Xiong and Wei Xia},
url = {https://www.amazon.science/publications/towards-regression-free-neural-networks-for-diverse-compute-platforms},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {ECCV 2022},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhu, Guijing; Ma, Long; Fan, Xin; Liu, Risheng
Hierarchical Bilevel Learning with Architecture and Loss Search for Hadamard-based Image Restoration Proceedings Article
In: Raedt, Luc De (Ed.): Proceedings of the Thirty-First International Joint Conference on Artificial Intelligence, IJCAI 2022, Vienna, Austria, 23-29 July 2022, pp. 1757–1764, ijcai.org, 2022.
@inproceedings{DBLP:conf/ijcai/Zhu00L22,
title = {Hierarchical Bilevel Learning with Architecture and Loss Search for Hadamard-based Image Restoration},
author = {Guijing Zhu and Long Ma and Xin Fan and Risheng Liu},
editor = {Luc De Raedt},
url = {https://doi.org/10.24963/ijcai.2022/245},
doi = {10.24963/ijcai.2022/245},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the Thirty-First International Joint Conference on Artificial Intelligence, IJCAI 2022, Vienna, Austria, 23-29 July 2022},
pages = {1757--1764},
publisher = {ijcai.org},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Jin, Guangyin; Li, Fuxian; Zhang, Jinlei; Wang, Mudan; Huang, Jincai
Automated Dilated Spatio-Temporal Synchronous Graph Modeling for Traffic Prediction Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2207-10830,
title = {Automated Dilated Spatio-Temporal Synchronous Graph Modeling for Traffic Prediction},
author = {Guangyin Jin and Fuxian Li and Jinlei Zhang and Mudan Wang and Jincai Huang},
url = {https://doi.org/10.48550/arXiv.2207.10830},
doi = {10.48550/arXiv.2207.10830},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2207.10830},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Liu, Zexiang; Li, Dong; Lu, Kaiyue; Qin, Zhen; Sun, Weixuan; Xu, Jiacheng; Zhong, Yiran
Neural Architecture Search on Efficient Transformers and Beyond Technical Report
2022.
@techreport{liu2022neural,
title = {Neural Architecture Search on Efficient Transformers and Beyond},
author = {Zexiang Liu and Dong Li and Kaiyue Lu and Zhen Qin and Weixuan Sun and Jiacheng Xu and Yiran Zhong},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {arXiv preprint arXiv:2207.13955},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Tran, Dai Quoc; Park, Minsoo; Jeon, Yuntae; Bak, Jinyeong; Park, Seunghee
Forest-Fire Response System Using Deep-Learning-Based Approaches With CCTV Images and Weather Data Journal Article
In: IEEE Access, vol. 10, pp. 66061–66071, 2022.
@article{DBLP:journals/access/TranPJBP22,
title = {Forest-Fire Response System Using Deep-Learning-Based Approaches With CCTV Images and Weather Data},
author = {Dai Quoc Tran and Minsoo Park and Yuntae Jeon and Jinyeong Bak and Seunghee Park},
url = {https://doi.org/10.1109/ACCESS.2022.3184707},
doi = {10.1109/ACCESS.2022.3184707},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Access},
volume = {10},
pages = {66061--66071},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Lim, Heechul; Kim, Min-Soo
TENAS: Using Taylor Expansion and Channel-level Skip Connection for Neural Architecture Search Journal Article
In: IEEE Access, pp. 1-1, 2022.
@article{9845403,
title = {TENAS: Using Taylor Expansion and Channel-level Skip Connection for Neural Architecture Search},
author = {Heechul Lim and Min-Soo Kim},
url = {https://ieeexplore.ieee.org/document/9845403},
doi = {10.1109/ACCESS.2022.3195208},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Access},
pages = {1-1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Ying, Weiqin; Yang, Kaihao; Wu, Yu; Li, Junhui; Zhou, Zhekun; Huang, Banban
Multi-objective Evolutionary Architecture Search of U-Net with Diamond Atrous Convolution Proceedings Article
In: Li, Kangshun; Liu, Yong; Wang, Wenxiang (Ed.): Exploration of Novel Intelligent Optimization Algorithms, pp. 31–40, Springer Nature Singapore, Singapore, 2022, ISBN: 978-981-19-4109-2.
@inproceedings{10.1007/978-981-19-4109-2_4,
title = {Multi-objective Evolutionary Architecture Search of U-Net with Diamond Atrous Convolution},
author = {Weiqin Ying and Kaihao Yang and Yu Wu and Junhui Li and Zhekun Zhou and Banban Huang},
editor = {Kangshun Li and Yong Liu and Wenxiang Wang},
url = {https://link.springer.com/chapter/10.1007/978-981-19-4109-2_4},
isbn = {978-981-19-4109-2},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Exploration of Novel Intelligent Optimization Algorithms},
pages = {31--40},
publisher = {Springer Nature Singapore},
address = {Singapore},
abstract = {U-Net and its variants have played important roles in the field of medical image segmentation. However, U-Nets based on conventional 3 * 3 convolution still have some shortcomings, such as the lack of deformation of receptive field. In addition, due to the limited computing resources and memory space on many machines, the allowed sizes of networks deployed on them are also limited. However, it may not be effective to manually design the architectures of U-Nets. In this paper, a U-Net architecture with diamond atrous convolution (DAU-Net) is presented. Furthermore, a multi-objective neural architecture search method with channel sorting of DAU-Net is proposed to search for the better U-Net architectures. Experimental results on the ISIC 2018 dataset of melanoma segmentation show that the proposed method obtains a series of network architectures with different sizes, and the obtained architectures achieve obvious improvements in term of both model sizes and prediction accuracies compared with several popular and manually designed variants of U-Net.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yu, Hongjiu; Sun, Qiancheng; Hu, Jin; Xue, Xingyuan; Luo, Jixiang; He, Dailan; Li, Yilong; Wang, Pengbo; Wang, Yuanyuan; Dai, Yaxu; Wang, Yan; Qin, Hongwei
Evaluating the Practicality of Learned Image Compression Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2207-14524,
title = {Evaluating the Practicality of Learned Image Compression},
author = {Hongjiu Yu and Qiancheng Sun and Jin Hu and Xingyuan Xue and Jixiang Luo and Dailan He and Yilong Li and Pengbo Wang and Yuanyuan Wang and Yaxu Dai and Yan Wang and Hongwei Qin},
url = {https://doi.org/10.48550/arXiv.2207.14524},
doi = {10.48550/arXiv.2207.14524},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2207.14524},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Ma, Jiaxiang
Pruning threshold search algorithm combined with PDARTS Proceedings Article
In: Wang, Lidan; Cen, Mengyi (Milly) (Ed.): 4th International Conference on Information Science, Electrical, and Automation Engineering (ISEAE 2022), pp. 382 – 387, International Society for Optics and Photonics SPIE, 2022.
@inproceedings{10.1117/12.2640465,
title = {Pruning threshold search algorithm combined with PDARTS},
author = {Jiaxiang Ma},
editor = {Lidan Wang and Mengyi (Milly) Cen},
url = {https://doi.org/10.1117/12.2640465},
doi = {10.1117/12.2640465},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {4th International Conference on Information Science, Electrical, and Automation Engineering (ISEAE 2022)},
volume = {12257},
pages = {382 -- 387},
publisher = {SPIE},
organization = {International Society for Optics and Photonics},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Liu, Yang; Lu, Jun
Double Loss Block Neural Architecture Search Proceedings Article
In: 2022 IEEE 10th Joint International Information Technology and Artificial Intelligence Conference (ITAIC), pp. 731-734, 2022.
@inproceedings{9836540,
title = {Double Loss Block Neural Architecture Search},
author = {Yang Liu and Jun Lu},
url = {https://ieeexplore.ieee.org/abstract/document/9836540},
doi = {10.1109/ITAIC54216.2022.9836540},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE 10th Joint International Information Technology and Artificial Intelligence Conference (ITAIC)},
volume = {10},
pages = {731-734},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Mariama, Diallo; Sun, Liang
i-DARTS: Improving differentiable architecture search by using graph and few-shot learning Proceedings Article
In: 2022 IEEE International Conference on Artificial Intelligence and Computer Applications (ICAICA), pp. 14-19, 2022.
@inproceedings{9844464,
title = {i-DARTS: Improving differentiable architecture search by using graph and few-shot learning},
author = {Diallo Mariama and Liang Sun},
url = {https://ieeexplore.ieee.org/abstract/document/9844464},
doi = {10.1109/ICAICA54878.2022.9844464},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Conference on Artificial Intelligence and Computer Applications (ICAICA)},
pages = {14-19},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Pau, Danilo; Ambrose, Prem Kumar
Automated Neural and On-Device Learning for Micro Controllers Proceedings Article
In: 2022 IEEE 21st Mediterranean Electrotechnical Conference (MELECON), pp. 758-763, 2022.
@inproceedings{9843050,
title = {Automated Neural and On-Device Learning for Micro Controllers},
author = {Danilo Pau and Prem Kumar Ambrose},
url = {https://ieeexplore.ieee.org/abstract/document/9843050},
doi = {10.1109/MELECON53508.2022.9843050},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE 21st Mediterranean Electrotechnical Conference (MELECON)},
pages = {758-763},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Pang, Tianji; Zhao, Shijie; Han, Junwei; Zhang, Shu; Guo, Lei; Liu, Tianming
Gumbel-Softmax based Neural Architecture Search for Hierarchical Brain Networks Decomposition Journal Article
In: Medical Image Analysis, pp. 102570, 2022, ISSN: 1361-8415.
@article{PANG2022102570,
title = {Gumbel-Softmax based Neural Architecture Search for Hierarchical Brain Networks Decomposition},
author = {Tianji Pang and Shijie Zhao and Junwei Han and Shu Zhang and Lei Guo and Tianming Liu},
url = {https://www.sciencedirect.com/science/article/pii/S1361841522002110},
doi = {10.1016/j.media.2022.102570},
issn = {1361-8415},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Medical Image Analysis},
pages = {102570},
abstract = {Understanding the brain's functional architecture has been an important topic in the neuroimaging field. A variety of brain network modeling methods have been proposed. Recently, deep neural network-based methods have shown a great advantage in modeling the hierarchical and complex functional brain networks (FBNs). However, most of these deep neural networks were handcrafted, making it time-consuming to find the relatively optimal architecture. To address this problem, we propose a novel unsupervised differentiable neural architecture search (NAS) algorithm, named Gumbel-Softmax based Neural Architecture Search (GS-NAS), to automate the architecture design of deep belief network (DBN) for hierarchical FBN decomposition. Specifically, we introduce the Gumbel-Softmax scheme to reframe the discrete architecture sampling procedure during NAS to be continuous. Guided by the reconstruction error minimization procedure, the architecture search can be driven by the intrinsic functional architecture of the brain, thereby revealing the possible hierarchical functional brain organization via DBN structure. The proposed GS-NAS algorithm can simultaneously optimize the number of hidden units for each layer and the network depth. Extensive experiment results on both task and resting-state functional magnetic resonance imaging data have demonstrated the effectiveness and efficiency of the proposed GS-NAS model. The identified hierarchically organized FBNs provide novel insight into understanding human brain function.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
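The Gumbel-Softmax relaxation that GS-NAS applies to architecture sampling is a generic, well-documented trick. A minimal PyTorch sketch of the trick itself follows; the pick among candidate hidden-layer widths is a hypothetical example, not the paper's DBN search space.
import torch
import torch.nn.functional as F

candidate_widths = torch.tensor([64.0, 128.0, 256.0, 512.0])
logits = torch.zeros(4, requires_grad=True)  # learnable architecture logits

# hard=True yields a one-hot sample in the forward pass while gradients
# flow through the soft relaxation (straight-through estimator).
sample = F.gumbel_softmax(logits, tau=1.0, hard=True)
chosen_width = (sample * candidate_widths).sum()

# Any loss that depends on the sampled choice now backpropagates into the
# architecture logits; here a hypothetical penalty toward width 200.
loss = (chosen_width - 200.0) ** 2
loss.backward()
print("one-hot sample:", sample.detach(), "logit gradients:", logits.grad)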
Tong, Lyuyang; Du, Bo
Neural architecture search via reference point based multi-objective evolutionary algorithm Journal Article
In: Pattern Recognition, pp. 108962, 2022, ISSN: 0031-3203.
@article{TONG2022108962,
title = {Neural architecture search via reference point based multi-objective evolutionary algorithm},
author = {Lyuyang Tong and Bo Du},
url = {https://www.sciencedirect.com/science/article/pii/S0031320322004423},
doi = {10.1016/j.patcog.2022.108962},
issn = {0031-3203},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Pattern Recognition},
pages = {108962},
abstract = {For neural architecture search, NSGA-Net has searched a representative neural architecture set of Pareto-optimal solutions to consider both accuracy and computation complexity simultaneously. However, some decision-makers only concentrate on such neural architectures in the subpart regions of Pareto-optimal Frontier that they have interests in. Under the above circumstances, certain uninterested neural architectures may cost many computing resources. In order to consider the preference of decision-makers, we propose the reference point based NSGA-Net (RNSGA-Net) for neural architecture search. The core of RNSGA-Net adopts the reference point approach to guarantee the Pareto-optimal region close to the reference points and also combines the advantage of NSGAII with the fast nondominated sorting approach to split the Pareto front. Moreover, we augment an extra bit value of the original encoding to represent two types of residual block and one type of dense block for residual connection and dense connection in the RNSGA-Net. In order to satisfy the decision-maker preference, the multi-objective is measured to search competitive neural architecture by minimizing an error metric and FLOPs of computational complexity. Experiment results on the CIFAR-10 dataset demonstrate that RNSGA-Net can improve NSGA-Net in terms of the more structured representation space and the preference of decision-makers.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
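The reference-point preference that RNSGA-Net adds on top of NSGA-Net can be illustrated in a few lines: candidate architectures are ranked by the normalized distance of their objective vectors to a decision-maker's reference point. The sketch below uses hypothetical (error, GFLOPs) values and omits the nondominated sorting and the rest of the NSGA-II machinery.
import numpy as np

# Hypothetical candidates with (validation error, GFLOPs) objectives.
objectives = np.array([
    [0.08, 4.0],
    [0.06, 9.0],
    [0.10, 1.5],
    [0.07, 6.0],
])
reference_point = np.array([0.07, 2.0])  # region the decision-maker wants

# Normalize each objective to [0, 1] so neither dominates the distance.
lo, hi = objectives.min(axis=0), objectives.max(axis=0)
norm = (objectives - lo) / (hi - lo)
ref = (reference_point - lo) / (hi - lo)

dist = np.linalg.norm(norm - ref, axis=1)
print("preference ranking (best first):", np.argsort(dist))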
Lu, Zhichao; Cheng, Ran; Jin, Yaochu; Tan, Kay Chen; Deb, Kalyanmoy
Neural Architecture Search as Multiobjective Optimization Benchmarks: Problem Formulation and Performance Assessment Technical Report
2022.
@techreport{lu2022neural,
title = {Neural Architecture Search as Multiobjective Optimization Benchmarks: Problem Formulation and Performance Assessment},
author = {Zhichao Lu and Ran Cheng and Yaochu Jin and Kay Chen Tan and Kalyanmoy Deb},
url = {https://arxiv.org/abs/2208.04321},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {arXiv preprint arXiv:2208.04321},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zhang, Shi-Xin; Hsieh, Chang-Yu; Zhang, Shengyu; Yao, Hong
Differentiable quantum architecture search Journal Article
In: Quantum Science and Technology, 2022.
@article{10.1088/2058-9565/ac87cd,
title = {Differentiable quantum architecture search},
author = {Shi-Xin Zhang and Chang-Yu Hsieh and Shengyu Zhang and Hong Yao},
url = {http://iopscience.iop.org/article/10.1088/2058-9565/ac87cd},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Quantum Science and Technology},
abstract = {Quantum architecture search (QAS) is the process of automating architecture engineering of quantum circuits. It has been desired to construct a powerful and general QAS platform which can significantly accelerate current efforts to identify quantum advantages of error-prone and depth-limited quantum circuits in the NISQ era. Hereby, we propose a general framework of differentiable quantum architecture search (DQAS), which enables automated designs of quantum circuits in an end-to-end differentiable fashion. We present several examples of circuit design problems to demonstrate the power of DQAS. For instance, unitary operations are decomposed into quantum gates, noisy circuits are re-designed to improve accuracy, and circuit layouts for quantum approximation optimization algorithm are automatically discovered and upgraded for combinatorial optimization problems. These results not only manifest the vast potential of DQAS being an essential tool for the NISQ application developments, but also present an interesting research topic from the theoretical perspective as it draws inspirations from the newly emerging interdisciplinary paradigms of differentiable programming, probabilistic programming, and quantum programming.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Peng, Jie; Liu, Haijun; Zhao, Zhongjin; Li, Zhiwei; Liu, Sen; Li, Qingjiang
CMQ: Crossbar-aware Neural Network Mixed-precision Quantization via Differentiable Architecture Search Journal Article
In: IEEE Transactions on Computer-Aided Design of Integrated Circuits and Systems, pp. 1-1, 2022.
@article{9852786,
title = {CMQ: Crossbar-aware Neural Network Mixed-precision Quantization via Differentiable Architecture Search},
author = {Jie Peng and Haijun Liu and Zhongjin Zhao and Zhiwei Li and Sen Liu and Qingjiang Li},
doi = {10.1109/TCAD.2022.3197495},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Computer-Aided Design of Integrated Circuits and Systems},
pages = {1-1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Xu, Lumin; Jin, Sheng; Liu, Wentao; Qian, Chen; Ouyang, Wanli; Luo, Ping; Wang, Xiaogang
ZoomNAS: Searching for Whole-body Human Pose Estimation in the Wild Journal Article
In: IEEE Transactions on Pattern Analysis and Machine Intelligence, pp. 1-18, 2022.
@article{9852279,
title = {ZoomNAS: Searching for Whole-body Human Pose Estimation in the Wild},
author = {Lumin Xu and Sheng Jin and Wentao Liu and Chen Qian and Wanli Ouyang and Ping Luo and Xiaogang Wang},
url = {https://ieeexplore.ieee.org/abstract/document/9852279},
doi = {10.1109/TPAMI.2022.3197352},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
pages = {1-18},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Ye, Peng; Li, Baopu; Chen, Tao; Fan, Jiayuan; Mei, Zhen; Lin, Chen; Zuo, Chongyan; Chi, Qinghua; Ouyang, Wanli
Efficient Joint-Dimensional Search with Solution Space Regularization for Real-Time Semantic Segmentation Miscellaneous
2022.
@misc{https://doi.org/10.48550/arxiv.2208.05271,
title = {Efficient Joint-Dimensional Search with Solution Space Regularization for Real-Time Semantic Segmentation},
author = {Peng Ye and Baopu Li and Tao Chen and Jiayuan Fan and Zhen Mei and Chen Lin and Chongyan Zuo and Qinghua Chi and Wanli Ouyang},
url = {https://arxiv.org/abs/2208.05271},
doi = {10.48550/ARXIV.2208.05271},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Wang, Zhen; Wei, Zhewei; Li, Yaliang; Kuang, Weirui; Ding, Bolin
Graph Neural Networks with Node-Wise Architecture Proceedings Article
In: Proceedings of the 28th ACM SIGKDD Conference on Knowledge Discovery and Data Mining, pp. 1949–1958, Association for Computing Machinery, Washington DC, USA, 2022, ISBN: 9781450393850.
@inproceedings{10.1145/3534678.3539387,
title = {Graph Neural Networks with Node-Wise Architecture},
author = {Zhen Wang and Zhewei Wei and Yaliang Li and Weirui Kuang and Bolin Ding},
url = {https://doi.org/10.1145/3534678.3539387},
doi = {10.1145/3534678.3539387},
isbn = {9781450393850},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the 28th ACM SIGKDD Conference on Knowledge Discovery and Data Mining},
pages = {1949–1958},
publisher = {Association for Computing Machinery},
address = {Washington DC, USA},
series = {KDD '22},
abstract = {Recently, Neural Architecture Search (NAS) for GNN has received increasing popularity as it can seek an optimal architecture for a given new graph. However, the optimal architecture is applied to all the instances (i.e., nodes, in the context of graph) equally, which might be insufficient to handle the diverse local patterns ingrained in a graph, as shown in this paper and some very recent studies. Thus, we argue the necessity of node-wise architecture search for GNN. Nevertheless, node-wise architecture cannot be realized by trivially applying NAS methods node by node due to the scalability issue and the need for determining test nodes' architectures. To tackle these challenges, we propose a framework wherein the parametric controllers decide the GNN architecture for each node based on its local patterns. We instantiate our framework with depth, aggregator and resolution controllers, and then elaborate on learning the backbone GNN model and the controllers to encourage their cooperation. Empirically, we justify the effects of node-wise architecture through the performance improvements introduced by the three controllers, respectively. Moreover, our proposed framework significantly outperforms state-of-the-art methods on five of the ten real-world datasets, where the diversity of these datasets has hindered any graph convolution-based method to lead on them simultaneously. This result further confirms that node-wise architecture can help GNNs become versatile models.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Lu, Zhichao; Cheng, Ran; Huang, Shihua; Zhang, Haoming; Qiu, Changxiao; Yang, Fan
Surrogate-assisted Multi-objective Neural Architecture Search for Real-time Semantic Segmentation Technical Report
2022.
@techreport{lu2022surrogate,
title = {Surrogate-assisted Multi-objective Neural Architecture Search for Real-time Semantic Segmentation},
author = {Zhichao Lu and Ran Cheng and Shihua Huang and Haoming Zhang and Changxiao Qiu and Fan Yang},
url = {https://arxiv.org/abs/2208.06820},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {arXiv preprint arXiv:2208.06820},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Lopes, Vasco; Santos, Miguel; Degardin, Bruno; Alexandre, Luís A.
Guided Evolutionary Neural Architecture Search With Efficient Performance Estimation Technical Report
2022.
@techreport{lopes2022guided,
title = {Guided Evolutionary Neural Architecture Search With Efficient Performance Estimation},
author = {Vasco Lopes and Miguel Santos and Bruno Degardin and Luís A. Alexandre},
url = {https://arxiv.org/abs/2208.06475},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {arXiv preprint arXiv:2208.06475},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Ochoa, Gabriela; Veerapen, Nadarajen
Neural Architecture Search: A Visual Analysis Proceedings Article
In: Rudolph, Günter; Kononova, Anna V.; Aguirre, Hernán; Kerschke, Pascal; Ochoa, Gabriela; Tušar, Tea (Ed.): Parallel Problem Solving from Nature -- PPSN XVII, pp. 603–615, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-14714-2.
@inproceedings{10.1007/978-3-031-14714-2_42,
title = {Neural Architecture Search: A Visual Analysis},
author = {Gabriela Ochoa and Nadarajen Veerapen},
editor = {Günter Rudolph and Anna V. Kononova and Hernán Aguirre and Pascal Kerschke and Gabriela Ochoa and Tea Tušar},
url = {https://link.springer.com/chapter/10.1007/978-3-031-14714-2_42},
isbn = {978-3-031-14714-2},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Parallel Problem Solving from Nature -- PPSN XVII},
pages = {603--615},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {Neural architecture search (NAS) refers to the use of search heuristics to optimise the topology of deep neural networks. NAS algorithms have produced topologies that outperform human-designed ones. However, contrasting alternative NAS methods is difficult. To address this, several tabular NAS benchmarks have been proposed that exhaustively evaluate all architectures in a given search space. We conduct a thorough fitness landscape analysis of a popular tabular, cell-based NAS benchmark. Our results indicate that NAS landscapes are multi-modal, but have a relatively low number of local optima, from which it is not hard to escape. We confirm that reducing the noise in estimating performance reduces the number of local optima. We hypothesise that local-search based NAS methods are likely to be competitive, which we confirm by implementing a landscape-aware iterated local search algorithm that can outperform more elaborate evolutionary and reinforcement learning NAS methods.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
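The iterated local search that the paper reports as competitive follows the standard hill-climb-then-perturb template. Below is a minimal sketch under assumed NAS-Bench-201-like dimensions; query() is a hypothetical stand-in for a tabular-benchmark accuracy lookup, with a little noise to mimic the evaluation noise the paper discusses, and the paper's landscape-aware ingredients are not reproduced.
import random

N_EDGES, N_OPS = 6, 5  # NAS-Bench-201-like cell: 6 edges, 5 operations

def query(arch):
    # Hypothetical stand-in for a tabular-benchmark accuracy lookup; the
    # small Gaussian term mimics noisy performance estimates.
    return -sum((op - 2) ** 2 for op in arch) + random.gauss(0, 0.01)

def local_search(arch):
    # Hill-climb: accept improving one-edge mutations until no neighbour improves.
    best, best_f = arch, query(arch)
    improved = True
    while improved:
        improved = False
        for i in range(N_EDGES):
            for op in range(N_OPS):
                if op != best[i]:
                    cand = best[:i] + (op,) + best[i + 1:]
                    f = query(cand)
                    if f > best_f:
                        best, best_f, improved = cand, f, True
    return best, best_f

random.seed(0)
best, best_f = local_search(tuple(random.randrange(N_OPS) for _ in range(N_EDGES)))
for _ in range(10):  # the "iterated" part: perturb two edges and restart
    kicked = list(best)
    for i in random.sample(range(N_EDGES), 2):
        kicked[i] = random.randrange(N_OPS)
    cand, cand_f = local_search(tuple(kicked))
    if cand_f > best_f:
        best, best_f = cand, cand_f
print("best cell:", best, "score:", round(best_f, 3))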
Chen, Yaofo; Guo, Yong; Chen, Peihao; Wang, Jingdong; Wang, Yaowei; Song, Hengjie; Tan, Mingkui
Automatic Subspace Evoking for Efficient Neural Architecture Search Technical Report
2022.
@techreport{chen2022automatic,
title = {Automatic Subspace Evoking for Efficient Neural Architecture Search},
author = {Yaofo Chen and Yong Guo and Peihao Chen and Jingdong Wang and Yaowei Wang and Hengjie Song and Mingkui Tan},
url = {https://chenyaofo.com/papers/chen-automatic-subspace-evoking-for-efficient-neural-architecture-search.pdf},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {arXiv preprint},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Eslami, Saeedeh; Monsefi, Reza; Akbari, Mohammad
Towards Leveraging Structure for Neural Predictor in NAS Journal Article
In: Computer and Knowledge Engineering, 2022, ISSN: 2538-5453.
@article{nokey,
title = {Towards Leveraging Structure for Neural Predictor in NAS},
author = {Saeedeh Eslami and Reza Monsefi and Mohammad Akbari},
url = {https://cke.um.ac.ir/article_42708.html},
doi = {10.22067/cke.2022.73356.1031},
issn = {2538-5453},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Computer and Knowledge Engineering},
pages = {-},
publisher = {Ferdowsi University of Mashhad},
abstract = {Neural Architecture Search (NAS), which automatically designs a neural architecture for a specific task, has attracted much attention in recent years. Properly defining the search space is a key step in the success of NAS approaches, which allows us to reduce the required time for evaluation. Thus, late strategies for searching a NAS space are to leverage supervised learning models for ranking potential neural models, i.e., surrogate predictive models. The predictive model takes the specification of an architecture (or its feature representation) and predicts the probable efficiency of the model ahead of training. Therefore, proper representation of a candidate architecture is an important factor for a predictor NAS approach. While several works have been devoted to training a good surrogate model, there exists limited research focusing on learning a good representation for these neural models. To address this problem, we investigate how to learn a representation with both structural and non-structural features of a network. In particular, we propose a tree structured encoding which permits to fully represent both networks’ layers and their intra-connections. The encoding is easily extendable to larger or more complex structures. Extensive experiments on two NAS datasets, NasBench101 and NasBench201, demonstrate the effectiveness of the proposed method as compared with the state-of-the-art predictors.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
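As a toy illustration of the surrogate-predictor setting this paper addresses, the sketch below encodes a small cell as a flattened adjacency matrix plus one-hot operation labels (a much simpler stand-in for the paper's tree-structured encoding) and fits a least-squares surrogate on synthetic placeholder data; it shows only the shapes involved, not the paper's method.
import numpy as np

rng = np.random.default_rng(0)
N_NODES, N_OPS = 4, 3

def encode(adj, ops):
    # Flatten the upper-triangular adjacency and one-hot the operation of
    # each node into a single feature vector a surrogate can consume.
    edges = adj[np.triu_indices(N_NODES, k=1)]
    onehot = np.eye(N_OPS)[ops].ravel()
    return np.concatenate([edges, onehot])

# Synthetic placeholder "dataset" standing in for benchmark records such
# as NAS-Bench-101 (architecture, accuracy) pairs.
X, y = [], []
for _ in range(200):
    adj = np.triu(rng.integers(0, 2, (N_NODES, N_NODES)), k=1)
    ops = rng.integers(0, N_OPS, N_NODES)
    X.append(encode(adj, ops))
    y.append(0.90 + 0.01 * adj.sum() - 0.005 * ops.sum())
X, y = np.array(X), np.array(y)

# Least-squares linear model: the simplest possible "neural predictor".
A = np.c_[X, np.ones(len(X))]
w, *_ = np.linalg.lstsq(A, y, rcond=None)
print("train MAE:", float(np.abs(A @ w - y).mean()))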
Zhou, Tong; Ren, Shaolei; Xu, Xiaolin
ObfuNAS: A Neural Architecture Search-based DNN Obfuscation Approach Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-08569,
title = {ObfuNAS: A Neural Architecture Search-based DNN Obfuscation Approach},
author = {Tong Zhou and Shaolei Ren and Xiaolin Xu},
url = {https://doi.org/10.48550/arXiv.2208.08569},
doi = {10.48550/arXiv.2208.08569},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.08569},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Xiao, Tesi; Xiao, Xia; Chen, Ming; Chen, Youlong
Field-wise Embedding Size Search via Structural Hard Auxiliary Mask Pruning for Click-Through Rate Prediction Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-08004,
title = {Field-wise Embedding Size Search via Structural Hard Auxiliary Mask Pruning for Click-Through Rate Prediction},
author = {Tesi Xiao and Xia Xiao and Ming Chen and Youlong Chen},
url = {https://doi.org/10.48550/arXiv.2208.08004},
doi = {10.48550/arXiv.2208.08004},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.08004},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Bhardwaj, Kartikeya; Ward, James; Tung, Caleb; Gope, Dibakar; Meng, Lingchuan; Fedorov, Igor; Chalfin, Alex; Whatmough, Paul N.; Loh, Danny
Restructurable Activation Networks Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-08562,
title = {Restructurable Activation Networks},
author = {Kartikeya Bhardwaj and James Ward and Caleb Tung and Dibakar Gope and Lingchuan Meng and Igor Fedorov and Alex Chalfin and Paul N. Whatmough and Danny Loh},
url = {https://doi.org/10.48550/arXiv.2208.08562},
doi = {10.48550/arXiv.2208.08562},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.08562},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zhang, Xuanyang; Li, Yonggang; Zhang, Xiangyu; Wang, Yongtao; Sun, Jian
Differentiable Architecture Search with Random Features Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-08835,
title = {Differentiable Architecture Search with Random Features},
author = {Xuanyang Zhang and Yonggang Li and Xiangyu Zhang and Yongtao Wang and Jian Sun},
url = {https://doi.org/10.48550/arXiv.2208.08835},
doi = {10.48550/arXiv.2208.08835},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.08835},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Chen, Hanxiong; Li, Yunqi; Zhu, He; Zhang, Yongfeng
Learn Basic Skills and Reuse: Modularized Adaptive Neural Architecture Search (MANAS) Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-11083,
title = {Learn Basic Skills and Reuse: Modularized Adaptive Neural Architecture Search (MANAS)},
author = {Hanxiong Chen and Yunqi Li and He Zhu and Yongfeng Zhang},
url = {https://doi.org/10.48550/arXiv.2208.11083},
doi = {10.48550/arXiv.2208.11083},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.11083},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Yu, Zhewen; Bouganis, Christos-Savvas
SVD-NAS: Coupling Low-Rank Approximation and Neural Architecture Search Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-10404,
title = {SVD-NAS: Coupling Low-Rank Approximation and Neural Architecture Search},
author = {Zhewen Yu and Christos-Savvas Bouganis},
url = {https://doi.org/10.48550/arXiv.2208.10404},
doi = {10.48550/arXiv.2208.10404},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.10404},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Liu, Jing; Cai, Jianfei; Zhuang, Bohan
FocusFormer: Focusing on What We Need via Architecture Sampler Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-10861,
title = {FocusFormer: Focusing on What We Need via Architecture Sampler},
author = {Jing Liu and Jianfei Cai and Bohan Zhuang},
url = {https://doi.org/10.48550/arXiv.2208.10861},
doi = {10.48550/arXiv.2208.10861},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.10861},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Cao, Xuyang; Chen, Houjin; Li, Yanfeng; Peng, Yahui; Zhou, Yue; Cheng, Lin; Liu, Tianming; Shen, Dinggang
Auto-DenseUNet: Searchable neural network architecture for mass segmentation in 3D automated breast ultrasound Journal Article
In: Medical Image Analysis, pp. 102589, 2022, ISSN: 1361-8415.
@article{CAO2022102589,
title = {Auto-DenseUNet: Searchable neural network architecture for mass segmentation in 3D automated breast ultrasound},
author = {Xuyang Cao and Houjin Chen and Yanfeng Li and Yahui Peng and Yue Zhou and Lin Cheng and Tianming Liu and Dinggang Shen},
url = {https://www.sciencedirect.com/science/article/pii/S1361841522002250},
doi = {10.1016/j.media.2022.102589},
issn = {1361-8415},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Medical Image Analysis},
pages = {102589},
abstract = {Accurate segmentation of breast mass in 3D automated breast ultrasound (ABUS) plays an important role in breast cancer analysis. Deep convolutional networks have become a promising approach in segmenting ABUS images. However, designing an effective network architecture is time-consuming, and highly relies on specialist’s experience and prior knowledge. To address this issue, we introduce a searchable segmentation network (denoted as Auto-DenseUNet) based on the neural architecture search (NAS) to search the optimal architecture automatically for the ABUS mass segmentation task. Concretely, a novel search space is designed based on a densely connected structure to enhance the gradient and information flows throughout the network. Then, to encourage multiscale information fusion, a set of searchable multiscale aggregation nodes between the down-sampling and up-sampling parts of the network are further designed. Thus, all the operators within the dense connection structure or between any two aggregation nodes can be searched to find the optimal structure. Finally, a novel decoupled search training strategy during architecture search is also introduced to alleviate the memory limitation caused by continuous relaxation in NAS. The proposed Auto-DenseUNet method has been evaluated on our ABUS dataset with 170 volumes (from 107 patients), including 120 training volumes and 50 testing volumes split at patient level. Experimental results on testing volumes show that our searched architecture performed better than several human-designed segmentation models on the 3D ABUS mass segmentation task, indicating the effectiveness of our proposed method.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Yuan, Jun; Liu, Mengchen; Tian, Fengyuan; Liu, Shixia
Visual Analysis of Neural Architecture Spaces for Summarizing Design Principles Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-09665,
title = {Visual Analysis of Neural Architecture Spaces for Summarizing Design Principles},
author = {Jun Yuan and Mengchen Liu and Fengyuan Tian and Shixia Liu},
url = {https://doi.org/10.48550/arXiv.2208.09665},
doi = {10.48550/arXiv.2208.09665},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.09665},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Xu, Shuhan; Ren, Yudan; Tao, Zeyang; Song, Limei; He, Xiaowei
Hierarchical Individual Naturalistic Functional Brain Networks with Group Consistency uncovered by a Two-Stage NAS-Volumetric Sparse DBN Framework Journal Article
In: eNeuro, 2022.
@article{XuENEURO.0200-22.2022,
title = {Hierarchical Individual Naturalistic Functional Brain Networks with Group Consistency uncovered by a Two-Stage NAS-Volumetric Sparse DBN Framework},
author = {Shuhan Xu and Yudan Ren and Zeyang Tao and Limei Song and Xiaowei He},
url = {https://www.eneuro.org/content/early/2022/08/19/ENEURO.0200-22.2022},
doi = {10.1523/ENEURO.0200-22.2022},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {eNeuro},
publisher = {Society for Neuroscience},
abstract = {The functional magnetic resonance imaging under naturalistic paradigm (NfMRI) showed great advantages in identifying complex and interactive functional brain networks due to its dynamics and multimodal information. In recent years, various deep learning models, such as deep convolutional autoencoder (DCAE), deep belief network (DBN) and volumetric sparse deep belief network (vsDBN), can obtain hierarchical functional brain networks (FBN) and temporal features from fMRI data. Among them, the vsDBN model revealed a good capability in identifying hierarchical FBNs by modelling fMRI volume images. However, due to the high dimensionality of fMRI volumes and the diverse training parameters of deep learning methods, especially the network architecture that is the most critical parameter for uncovering the hierarchical organization of human brain function, researchers still face challenges in designing an appropriate deep learning framework with automatic network architecture optimization to model volumetric NfMRI. In addition, most of the existing deep learning models ignore the group-wise consistency and inter-subject variation properties embedded in NfMRI volumes. To solve these problems, we proposed a two-stage neural architecture search and vs DBN model (two-stage NAS-vsDBN model) to identify the hierarchical human brain spatio-temporal features possessing both group-consistency and individual-uniqueness under naturalistic condition. Moreover, our model defined reliable network structure for modelling volumetric NfMRI data via NAS framework, and the group-level and individual-level FBNs and associated temporal features exhibited great consistency. In general, our method well identified the hierarchical temporal and spatial features of the brain function and revealed the crucial properties of neural processes under natural viewing condition. Significance Statement: In this paper, we proposed and applied a novel analytical strategy – a two-stage NAS-vsDBN model to identify both group-level and individual-level spatio-temporal features at multi-scales from volumetric NfMRI data. The proposed PSO-based NAS framework can find optimal neural structure for both group-wise and individual-level vs-DBN models. Furthermore, with well-established correspondence between two stages of vsDBN models, our model can effectively detect group-level FBNs that reveal the consistency in neural processes across subjects and individual-level FBNs that maintain the subject specific variability, verifying the inherent property of brain function under naturalistic condition.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Li, Nan; Ma, Lianbo; Yu, Guo; Xue, Bing; Zhang, Mengjie; Jin, Yaochu
Survey on Evolutionary Deep Learning: Principles, Algorithms, Applications and Open Issues Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2208-10658,
title = {Survey on Evolutionary Deep Learning: Principles, Algorithms, Applications and Open Issues},
author = {Nan Li and Lianbo Ma and Guo Yu and Bing Xue and Mengjie Zhang and Yaochu Jin},
url = {https://doi.org/10.48550/arXiv.2208.10658},
doi = {10.48550/arXiv.2208.10658},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2208.10658},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}