Maintained by Difan Deng and Marius Lindauer.
The following list considers papers related to neural architecture search. It is by no means complete. If you miss a paper on the list, please let us know.
Please note that although NAS methods steadily improve, the quality of empirical evaluations in this field is still lagging behind compared to other areas in machine learning, AI and optimization. We would therefore like to share some best practices for empirical evaluations of NAS methods, which we believe will facilitate sustained and measurable progress in the field. If you are interested in a teaser, please read our blog post or directly jump to our checklist.
Transformers have gained increasing popularity in different domains. For a comprehensive list of papers focusing on Neural Architecture Search for Transformer-Based spaces, the awesome-transformer-search repo is all you need.
2022
Chen, Panyue; Wang, Rui; Zhao, Ping; Liu, Guanming; Wei, Zhihua
Searching Efficient Dynamic Graph CNN for Point Cloud Processing Proceedings Article
In: 1st International Conference on Automated Machine Learning, Late-Breaking Workshop, 2022.
@inproceedings{nokey,
title = {Searching Efficient Dynamic Graph {CNN} for Point Cloud Processing},
author = {Panyue Chen and Rui Wang and Ping Zhao and Guanming Liu and Zhihua Wei},
url = {https://automl.cc/wp-content/uploads/2022/07/searching_efficient_dynamic_gr.pdf},
year = {2022},
date = {2022-07-21},
urldate = {2022-07-21},
booktitle = {1st International Conference on Automated Machine Learning, Late-Breaking Workshop},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhang, Tianning; Kee, Chun Yun; Ang, Yee Sin; LI, Erping; Ang, Lay Kee
Symmetry Enhanced Network Architecture Search for Complex Metasurface Design Journal Article
In: IEEE Access, 2022.
@article{ZhangIEEEACCESS2022,
title = {Symmetry Enhanced Network Architecture Search for Complex Metasurface Design},
author = {Zhang, Tianning and Kee, Chun Yun and Ang, Yee Sin and LI, Erping and Ang, Lay Kee},
url = {https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=9826756},
year = {2022},
date = {2022-07-18},
urldate = {2022-07-18},
journal = {IEEE Access},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gupta, Abhibha; Sheth, Parth; Xie, Pengtao
Neural architecture search for pneumonia diagnosis from chest X-rays Journal Article
In: Scientific Reports, vol. 12, 2022.
@article{Gupta2022,
title = {Neural architecture search for pneumonia diagnosis from chest {X-rays}},
author = {Abhibha Gupta and Parth Sheth and Pengtao Xie},
url = {https://www.nature.com/articles/s41598-022-15341-0},
year = {2022},
date = {2022-07-04},
urldate = {2022-07-04},
journal = {Scientific Reports},
volume = {12},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gridin, Ivan
Automated Deep Learning Using Neural Network Intelligence Book
2022, ISBN: 978-1-4842-8148-2.
@book{GridinADLbook,
title = {Automated Deep Learning Using Neural Network Intelligence},
author = {Ivan Gridin},
url = {https://link.springer.com/content/pdf/10.1007/978-1-4842-8149-9.pdf},
isbn = {978-1-4842-8148-2},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
publisher = {Apress},
keywords = {},
pubstate = {published},
tppubtype = {book}
}
Liu, Jia; Jin, Yaochu
Bi-fidelity Multi-objective Neural Architecture Search for Adversarial Robustness with Surrogate as a Helper-objective Proceedings Article
In: 2022.
@inproceedings{LiIjCAI2022,
title = {Bi-fidelity Multi-objective Neural Architecture Search for Adversarial Robustness with Surrogate as a Helper-objective},
author = {Jia Liu and Yaochu Jin},
url = {https://federated-learning.org/fl-ijcai-2022/Papers/FL-IJCAI-22_paper_22.pdf},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {International Workshop on Trustworthy Federated Learning in Conjunction with {IJCAI} 2022 ({FL-IJCAI}'22)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Fayyazifar, Najmeh
Deep learning and neural architecture search for cardiac arrhythmias classification PhD Thesis
2022.
@phdthesis{FayyazifarPhD,
title = {Deep learning and neural architecture search for cardiac arrhythmias classification},
author = {Najmeh Fayyazifar},
url = {https://ro.ecu.edu.au/theses/2553/},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
school = {Edith Cowan University},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Wen, Long; Wang, You; Li, Xinyu
A new automatic convolutional neural network based on deep reinforcement learning for fault diagnosis Journal Article
In: Frontiers of Mechanical Engineering, vol. 17, 2022.
@article{WenFME2022,
title = {A new automatic convolutional neural network based on deep reinforcement learning for fault diagnosis},
author = {Long Wen and You Wang and Xinyu Li},
url = {https://link.springer.com/article/10.1007/s11465-022-0673-7},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
journal = {Frontiers of Mechanical Engineering},
volume = {17},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Parallel and Distributed Methods for Autonomous Design of Artificial Neural Networks PhD Thesis
2022.
@phdthesis{GeorgeKyriakides,
title = {Parallel and Distributed Methods for Autonomous Design of Artificial Neural Networks},
author = {George Kyriakides},
url = {https://dspace.lib.uom.gr/bitstream/2159/27216/5/KyriakidesGeorgePhD2022.pdf},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
school = {University of Macedonia},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Sun, Zhenhong; Lin, Ming; Sun, Xiuyu; Tan, Zhiyu; Li, Hao; Jin, Rong
MAE-DET: Revisiting Maximum Entropy Principle in Zero-Shot NAS for Efficient Object Detection Proceedings Article
In: Chaudhuri, Kamalika; Jegelka, Stefanie; Song, Le; Szepesvari, Csaba; Niu, Gang; Sabato, Sivan (Ed.): Proceedings of the 39th International Conference on Machine Learning, pp. 20810–20826, PMLR, 2022.
@inproceedings{pmlr-v162-sun22c,
title = {{MAE-DET}: Revisiting Maximum Entropy Principle in Zero-Shot {NAS} for Efficient Object Detection},
author = {Zhenhong Sun and Ming Lin and Xiuyu Sun and Zhiyu Tan and Hao Li and Rong Jin},
editor = {Kamalika Chaudhuri and Stefanie Jegelka and Le Song and Csaba Szepesvari and Gang Niu and Sivan Sabato},
url = {https://proceedings.mlr.press/v162/sun22c.html},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {Proceedings of the 39th International Conference on Machine Learning},
volume = {162},
pages = {20810--20826},
publisher = {PMLR},
series = {Proceedings of Machine Learning Research},
abstract = {In object detection, the detection backbone consumes more than half of the overall inference cost. Recent researches attempt to reduce this cost by optimizing the backbone architecture with the help of Neural Architecture Search (NAS). However, existing NAS methods for object detection require hundreds to thousands of GPU hours of searching, making them impractical in fast-paced research and development. In this work, we propose a novel zero-shot NAS method to address this issue. The proposed method, named MAE-DET, automatically designs efficient detection backbones via the Maximum Entropy Principle without training network parameters, reducing the architecture design cost to nearly zero yet delivering the state-of-the-art (SOTA) performance. Under the hood, MAE-DET maximizes the differential entropy of detection backbones, leading to a better feature extractor for object detection under the same computational budgets. After merely one GPU day of fully automatic design, MAE-DET innovates SOTA detection backbones on multiple detection benchmark datasets with little human intervention. Comparing to ResNet-50 backbone, MAE-DET is $+2.0\%$ better in mAP when using the same amount of FLOPs/parameters, and is $1.54$ times faster on NVIDIA V100 at the same mAP. Code and pre-trained models are available here (https://github.com/alibaba/lightweight-neural-architecture-search).},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Sun, Zhenhong; Lin, Ming; Sun, Xiuyu; Tan, Zhiyu; Li, Hao; Jin, Rong
MAE-DET: Revisiting Maximum Entropy Principle in Zero-Shot NAS for Efficient Object Detection Proceedings Article
In: Chaudhuri, Kamalika; Jegelka, Stefanie; Song, Le; Szepesvari, Csaba; Niu, Gang; Sabato, Sivan (Ed.): Proceedings of the 39th International Conference on Machine Learning, pp. 20810–20826, PMLR, 2022.
@inproceedings{pmlr-v162-sun22cb,
title = {{MAE-DET}: Revisiting Maximum Entropy Principle in Zero-Shot {NAS} for Efficient Object Detection},
author = {Zhenhong Sun and Ming Lin and Xiuyu Sun and Zhiyu Tan and Hao Li and Rong Jin},
editor = {Kamalika Chaudhuri and Stefanie Jegelka and Le Song and Csaba Szepesvari and Gang Niu and Sivan Sabato},
url = {https://proceedings.mlr.press/v162/sun22c.html},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {Proceedings of the 39th International Conference on Machine Learning},
volume = {162},
pages = {20810--20826},
publisher = {PMLR},
series = {Proceedings of Machine Learning Research},
abstract = {In object detection, the detection backbone consumes more than half of the overall inference cost. Recent researches attempt to reduce this cost by optimizing the backbone architecture with the help of Neural Architecture Search (NAS). However, existing NAS methods for object detection require hundreds to thousands of GPU hours of searching, making them impractical in fast-paced research and development. In this work, we propose a novel zero-shot NAS method to address this issue. The proposed method, named MAE-DET, automatically designs efficient detection backbones via the Maximum Entropy Principle without training network parameters, reducing the architecture design cost to nearly zero yet delivering the state-of-the-art (SOTA) performance. Under the hood, MAE-DET maximizes the differential entropy of detection backbones, leading to a better feature extractor for object detection under the same computational budgets. After merely one GPU day of fully automatic design, MAE-DET innovates SOTA detection backbones on multiple detection benchmark datasets with little human intervention. Comparing to ResNet-50 backbone, MAE-DET is $+2.0\%$ better in mAP when using the same amount of FLOPs/parameters, and is $1.54$ times faster on NVIDIA V100 at the same mAP. Code and pre-trained models are available here (https://github.com/alibaba/lightweight-neural-architecture-search).},
internal-note = {Exact duplicate of entry pmlr-v162-sun22c; consider removing one of the two.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Marchisio, Alberto; Mrazek, Vojtech; Massa, Andrea; Bussolino, Beatrice; Martina, Maurizio; Shafique, Muhammad
HARNAS: Neural Architecture Search Jointly Optimizing for Hardware Efficiency and Adversarial Robustness of Convolutional and Capsule Networks Proceedings Article
In: DyNN workshop at the 39th International Conference on Machine Learning, 2022.
@inproceedings{MarchisioDyNN2022,
title = {{HARNAS}: Neural Architecture Search Jointly Optimizing for Hardware Efficiency and Adversarial Robustness of Convolutional and Capsule Networks},
author = {Alberto Marchisio and Vojtech Mrazek and Andrea Massa and Beatrice Bussolino and Maurizio Martina and Muhammad Shafique},
url = {https://dynn-icml2022.github.io/papers/paper_18.pdf},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {DyNN workshop at the 39th International Conference on Machine Learning},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Malkova, Aleksandra
Reconstruction of Radio Power Maps with Deep Neural Networks in a Weakly Labeled Learning Context PhD Thesis
Universite Grenoble Alpes, 2022.
@phdthesis{malkova:tel-03740720,
title = {Reconstruction of Radio Power Maps with Deep Neural Networks in a Weakly Labeled Learning Context},
author = {Aleksandra Malkova},
url = {https://tel.archives-ouvertes.fr/tel-03740720},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
school = {Université Grenoble Alpes},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Zhou, J.; Chen, F.; Lu, G.
Pruning Based Training-Free Neural Architecture Search Proceedings Article
In: 2022 IEEE International Conference on Multimedia and Expo (ICME), pp. 1-6, IEEE Computer Society, Los Alamitos, CA, USA, 2022.
@inproceedings{9859732,
title = {Pruning Based Training-Free Neural Architecture Search},
author = {J. Zhou and F. Chen and G. Lu},
url = {https://doi.ieeecomputersociety.org/10.1109/ICME52920.2022.9859732},
doi = {10.1109/ICME52920.2022.9859732},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {2022 IEEE International Conference on Multimedia and Expo (ICME)},
pages = {1-6},
publisher = {IEEE Computer Society},
address = {Los Alamitos, CA, USA},
abstract = {Neural Architecture Search (NAS) plays an important role in searching for high-performance neural networks. However, NAS algorithms are slow and require a terrific amount of computing resources, because they need to be trained on supernet or dense candidate networks to obtain information for evaluation. If the high-performance network architecture could be selected without training, it would eliminate a significant part of the computational cost. Therefore, we propose a zero-cost metric called EX-score, which can represent the expressivity of the network and rank the untrained architectures. To further reduce cost, we design a pruning based zero-cost neural architecture search framework (PZ-NAS) using EX-score. PZ-NAS can prune the initialised supernet rapidly and obtains hundreds of times faster speed performance, whilst achieving comparable accuracy property on CIFAR-10 and ImageNet.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Poliakov, E.; Luu, V.; Tran, V.; Huang, C.
Model Compression via Structural Pruning and Feature Distillation for Accurate Multi-Spectral Object Detection on Edge-Devices Proceedings Article
In: 2022 IEEE International Conference on Multimedia and Expo (ICME), pp. 1-6, IEEE Computer Society, Los Alamitos, CA, USA, 2022.
@inproceedings{9859994,
title = {Model Compression via Structural Pruning and Feature Distillation for Accurate Multi-Spectral Object Detection on Edge-Devices},
author = {Poliakov, E. and Luu, V. and Tran, V. and Huang, C.},
url = {https://doi.ieeecomputersociety.org/10.1109/ICME52920.2022.9859994},
doi = {10.1109/ICME52920.2022.9859994},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {2022 IEEE International Conference on Multimedia and Expo (ICME)},
pages = {1-6},
publisher = {IEEE Computer Society},
address = {Los Alamitos, CA, USA},
abstract = {Multi-spectral infrared object detection across different infrared wavelengths is a challenging task. Although some full-sized object detection models, such as YOLOv4 and ScaledYOLO, may achieve good infrared object detection, they are resource-demanding and unsuitable for real-time detection on edge devices. Tiny versions for object detection are proposed to meet the practical requirement, but they usually sacrifice model accuracy and generalization for efficiency. We propose an accurate and efficient object detector capable of performing real-time inference under the hardware constraints of an edge device by leveraging structural pruning, feature distillation, and neural architecture search (NAS). The experiments on FLIR and multi-spectral object detection datasets show that our model achieves comparable mAP to full-sized models while having 14x times fewer parameters and 3.5x times fewer FLOPs. Our model can perform infrared detection well across different infrared wavelengths. The optimal CSPNet configurations of our detection network selected by NAS show that the resulting architectures outperform the baseline.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Liao, W.; Zhang, Z.; Wang, X.; Yin, T.; Liu, H.; Ren, Z.; Yin, J.; Feng, S.
Distilldarts: Network Distillation for Smoothing Gradient Distributions in Differentiable Architecture Search Proceedings Article
In: 2022 IEEE International Conference on Multimedia and Expo (ICME), pp. 1-6, IEEE Computer Society, Los Alamitos, CA, USA, 2022.
@inproceedings{9859708,
title = {Distilldarts: Network Distillation for Smoothing Gradient Distributions in Differentiable Architecture Search},
author = {W. Liao and Z. Zhang and X. Wang and T. Yin and H. Liu and Z. Ren and J. Yin and S. Feng},
url = {https://doi.ieeecomputersociety.org/10.1109/ICME52920.2022.9859708},
doi = {10.1109/ICME52920.2022.9859708},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
booktitle = {2022 IEEE International Conference on Multimedia and Expo (ICME)},
pages = {1-6},
publisher = {IEEE Computer Society},
address = {Los Alamitos, CA, USA},
abstract = {Recent studies show that differentiable architecture search (DARTS) suffers notable instability and collapse issue: skip-connect may gradually dominate the cell, leading to deteriorating architectures. We conjecture that the domination of skip-connect is due to its superiority in gradient compensate. On this foundation, we propose a novel and stable method, called DistillDARTS, to stabilize DARTS by knowledge distillation and self-distillation scheme. Specifically, the distillation is able to serve as a substitute for skip-connect and smooth the back-propagated gradient distributions among layers of DARTS. By compensating gradients in shallow layers, our method can relieve the dependence of gradient on skip-connect and hence mitigates the collapse issue. Extensive experiments on a range of benchmarks demonstrate that DistillDARTS can obtain sturdy architectures with few skip-connects without additional manual interventions, thus successfully improving the robustness of DARTS. Due to the improved stability, our proposed approach achieves the accuracy of 97.57\% on CIFAR-10 and 75.8\% on ImageNet.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Chu, Jianjun; Yu, Xiaoshan; Yang, Shangshang; Qiu, Jianfeng; Wang, Qijun
Architecture entropy sampling-based evolutionary neural architecture search and its application in osteoporosis diagnosis Journal Article
In: Complex & Intelligent Systems, 2022.
@article{ChuCIS2022,
title = {Architecture entropy sampling-based evolutionary neural architecture search and its application in osteoporosis diagnosis},
author = {Jianjun Chu and Xiaoshan Yu and Shangshang Yang and Jianfeng Qiu and Qijun Wang},
url = {https://link.springer.com/article/10.1007/s40747-022-00794-7},
year = {2022},
date = {2022-06-25},
urldate = {2022-06-25},
journal = {Complex \& Intelligent Systems},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Sapra, Dolly
Efficient neural architectures for edge devices PhD Thesis
2022.
@phdthesis{SapraPhD2022,
title = {Efficient neural architectures for edge devices},
author = {Dolly Sapra},
url = {https://dare.uva.nl/search?identifier=03eff2c1-b5ab-4fc8-bfe6-046c0a92941b},
year = {2022},
date = {2022-06-15},
urldate = {2022-06-15},
school = {University of Amsterdam},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Pinos, Michal; Mrazek, Vojtech; Sekanina, Lukas
Evolutionary approximation and neural architecture search Journal Article
In: Genetic Programming and Evolvable Machines, 2022.
@article{Pinos2022,
title = {Evolutionary approximation and neural architecture search},
author = {Michal Pinos and Vojtech Mrazek and Lukas Sekanina},
url = {https://doi.org/10.1007/s10710-022-09441-z},
year = {2022},
date = {2022-06-11},
urldate = {2022-06-11},
journal = {Genetic Programming and Evolvable Machines},
internal-note = {Duplicate of entry Pinos2022b (same DOI 10.1007/s10710-022-09441-z); consider merging.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Rosero, Santiago Vladimir Gomez
Evolving Deep Neural Network Architectures for Time Series Data PhD Thesis
2022.
@phdthesis{RoseroPHD2022,
title = {Evolving Deep Neural Network Architectures for Time Series Data},
author = {Santiago Vladimir Gomez Rosero},
url = {https://ir.lib.uwo.ca/etd/8555/},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
school = {University of Western Ontario},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Lima, Ricardo H. R.; Magalhães, Dimmy; Pozo, Aurora; Mendiburu, Alexander; Santana, Roberto
A grammar-based GP approach applied to the design of deep neural networks Journal Article
In: Genetic Programming and Evolvable Machines , 2022.
@article{LimaGPEM2022,
title = {A grammar-based {GP} approach applied to the design of deep neural networks},
author = {Ricardo H. R. Lima and Dimmy Magalhães and Aurora Pozo and Alexander Mendiburu and Roberto Santana},
url = {https://link.springer.com/article/10.1007/s10710-022-09432-0},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
journal = {Genetic Programming and Evolvable Machines},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hendrickx, Lotte; Ranst, Wiebe Van; Goedemé, Toon
Hot-Started NAS for Task-Specific Embedded Applications Proceedings Article
In: Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR) Workshops, pp. 1971-1978, 2022.
@inproceedings{Hendrickx_2022_CVPR,
title = {Hot-Started {NAS} for Task-Specific Embedded Applications},
author = {Lotte Hendrickx and Wiebe Van Ranst and Toon Goedemé},
url = {https://openaccess.thecvf.com/content/CVPR2022W/NAS/papers/Hendrickx_Hot-Started_NAS_for_Task-Specific_Embedded_Applications_CVPRW_2022_paper.pdf},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR) Workshops},
pages = {1971-1978},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Pinos, Michal; Mrazek, Vojtech; Sekanina, Lukas
Evolutionary approximation and neural architecture search Journal Article
In: Genetic Programming and Evolvable Machines, 2022.
@article{Pinos2022b,
title = {Evolutionary approximation and neural architecture search},
author = {Michal Pinos and Vojtech Mrazek and Lukas Sekanina},
url = {https://link.springer.com/article/10.1007/s10710-022-09441-z},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
journal = {Genetic Programming and Evolvable Machines},
internal-note = {Duplicate of entry Pinos2022 (same DOI 10.1007/s10710-022-09441-z); consider merging. Removed scraped junk editor field "Maria -".},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
YANG, Yibo; SHEN, Zhengyang; LI, Huan; LIN, Zhouchen
Optimization-Inspired Manual Architecture Design and Neural Architecture Search Journal Article
In: SCIENCE CHINA Information Sciences, 2022.
@article{yangoptimization,
title = {Optimization-Inspired Manual Architecture Design and Neural Architecture Search},
author = {YANG, Yibo and SHEN, Zhengyang and LI, Huan and LIN, Zhouchen},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
journal = {SCIENCE CHINA Information Sciences},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Yue, Zhixiong; Guo, Pengxin; Zhang, Yu; Liang, Jie
Learning Feature Alignment Architecture for Domain Adaptation Proceedings Article
In: IJCNN 2022, 2022.
@inproceedings{YueIJCNN2022,
title = {Learning Feature Alignment Architecture for Domain Adaptation},
author = {Zhixiong Yue and Pengxin Guo and Yu Zhang and Jie Liang},
url = {https://yuezhixiong.github.io/Papers/AASPC.pdf},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
booktitle = {International Joint Conference on Neural Networks ({IJCNN} 2022)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Ayman, Afiya; Martinez, Juan; Pugliese, Philip; Dubey, Abhishek; Laszka, Aron
Neural Architecture and Feature Search for Predicting the Ridership of Public Transportation Routes Proceedings Article
In: proceedings of the 8th IEEE International Conference on Smart Computing (SMARTCOMP 2022), 2022.
@inproceedings{Ayman2022,
title = {Neural Architecture and Feature Search for Predicting the Ridership of Public Transportation Routes},
author = {Afiya Ayman and Juan Martinez and Philip Pugliese and Abhishek Dubey and Aron Laszka},
url = {https://aronlaszka.com/papers/ayman2022neural.pdf},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
booktitle = {Proceedings of the 8th IEEE International Conference on Smart Computing ({SMARTCOMP} 2022)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
He, Zhimin; Su, Junjian; Chen, Chuangtao; Pan, Minghua; Situ, Haozhen
Search space pruning for quantum architecture search Proceedings Article
In: The European Physical Journal Plus , 2022.
@article{HeEPJP2022,
title = {Search space pruning for quantum architecture search},
author = {Zhimin He and Junjian Su and Chuangtao Chen and Minghua Pan and Haozhen Situ},
url = {https://link.springer.com/article/10.1140/epjp/s13360-022-02714-7},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {The European Physical Journal Plus},
volume = {137},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhang, Xinbang; Jin, Qizhao; Yu, Tingzhao; Xiang, Shiming; Kuang, Qiuming; Prinet, Véronique; Pan, Chunhong
Multi-modal spatio-temporal meteorological forecasting with deep neural network Proceedings Article
In: ISPRS journal of photogrammetry and remote sensing, 2022.
@article{ZhangMulti2022,
title = {Multi-modal spatio-temporal meteorological forecasting with deep neural network},
author = {Xinbang Zhang and Qizhao Jin and Tingzhao Yu and Shiming Xiang and Qiuming Kuang and Véronique Prinet and Chunhong Pan},
url = {https://pubag.nal.usda.gov/catalog/7745216},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {ISPRS Journal of Photogrammetry and Remote Sensing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhang, Xinbang; Jin, Qizhao; Yu, Tingzhao; Xiang, Shiming; Kuang, Qiuming; Prinet, Véronique; Pan, Chunhong
Multi-modal spatio-temporal meteorological forecasting with deep neural network Proceedings Article
In: ISPRS journal of photogrammetry and remote sensing, 2022.
@article{ZhangMulti2022b,
title = {Multi-modal spatio-temporal meteorological forecasting with deep neural network},
author = {Xinbang Zhang and Qizhao Jin and Tingzhao Yu and Shiming Xiang and Qiuming Kuang and Véronique Prinet and Chunhong Pan},
url = {https://pubag.nal.usda.gov/catalog/7745216},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {ISPRS Journal of Photogrammetry and Remote Sensing},
internal-note = {Exact duplicate of entry ZhangMulti2022; consider removing.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhang, Xinbang; Jin, Qizhao; Yu, Tingzhao; Xiang, Shiming; Kuang, Qiuming; Prinet, Véronique; Pan, Chunhong
Multi-modal spatio-temporal meteorological forecasting with deep neural network Proceedings Article
In: ISPRS journal of photogrammetry and remote sensing, 2022.
@article{ZhangMulti2022c,
title = {Multi-modal spatio-temporal meteorological forecasting with deep neural network},
author = {Xinbang Zhang and Qizhao Jin and Tingzhao Yu and Shiming Xiang and Qiuming Kuang and Véronique Prinet and Chunhong Pan},
url = {https://pubag.nal.usda.gov/catalog/7745216},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {ISPRS Journal of Photogrammetry and Remote Sensing},
internal-note = {Exact duplicate of entry ZhangMulti2022; consider removing.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Li, Xiao; Lei, Lin; Zhang, Caiguang; Kuang, Gangyao
Multimodal Semantic Consistency-Based Fusion Architecture Search for Land Cover Classification Technical Report
2022.
@techreport{Li2022,
title = {Multimodal Semantic Consistency-Based Fusion Architecture Search for Land Cover Classification},
author = {Xiao Li and Lin Lei and Caiguang Zhang and Gangyao Kuang},
url = {https://www.researchgate.net/profile/Xiao-Li-120/publication/360165422_Multimodal_Semantic_Consistency-Based_Fusion_Architecture_Search_for_Land_Cover_Classification/links/62661867ee24725b3ec4b841/Multimodal-Semantic-Consistency-Based-Fusion-Architecture-Search-for-Land-Cover-Classification.pdf},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Kishore, Jaydeep; Mukherjee, Snehasis
Autotuned Classification Based on Knowledge Transferred from Self-supervised Models Technical Report
2022.
@techreport{Kishore20222,
title = {Autotuned Classification Based on Knowledge Transferred from Self-supervised Models},
author = {Jaydeep Kishore and Snehasis Mukherjee},
url = {https://www.researchsquare.com/article/rs-1600269/v1},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Yan, Chengrun
Automated Machine Learning Under Resource Constraints PhD Thesis
2022.
@phdthesis{YanPHD2022,
title = {Automated Machine Learning Under Resource Constraints},
author = {Chengrun Yan},
url = {https://people.ece.cornell.edu/cy/_papers/chengrun_phd_thesis.pdf},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
school = {Cornell University},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Louati, Hassen; Bechikh, Slim; Louati, Ali; Aldaej, Abdulaziz; Said, Lamjed Ben
Joint design and compression of convolutional neural networks as a Bi-level optimization problem Journal Article
In: Neural Computing and Applications , 2022.
@article{Louati2022,
title = {Joint design and compression of convolutional neural networks as a Bi-level optimization problem},
author = {Hassen Louati and Slim Bechikh and Ali Louati and Abdulaziz Aldaej and Lamjed Ben Said},
url = {https://link.springer.com/article/10.1007/s00521-022-07331-0},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {Neural Computing and Applications},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Luo, Jiapeng; Wang, Zhongfeng
Automatically search an optimal face detector for a specific deployment environment Journal Article
In: EURASIP Journal on Advances in Signal Processing, 2022.
@article{LuoEURASIP2022,
title = {Automatically search an optimal face detector for a specific deployment environment},
author = {Jiapeng Luo and Zhongfeng Wang},
url = {https://link.springer.com/article/10.1186/s13634-022-00868-1},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {EURASIP Journal on Advances in Signal Processing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kung, S. Y.
XNAS: A Regressive/Progressive NAS for Deep Learning Journal Article
In: ACM Trans. Sen. Netw., 2022, ISSN: 1550-4859, (Just Accepted).
@article{10.1145/3543669,
title = {{XNAS}: A Regressive/Progressive {NAS} for Deep Learning},
author = {S. Y. Kung},
url = {https://doi.org/10.1145/3543669},
doi = {10.1145/3543669},
issn = {1550-4859},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {ACM Trans. Sen. Netw.},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
abstract = {Deep learning has achieved great and broad breakthroughs in many real-world applications. In particular, the task of training the network parameters has been masterly handled by the Back-Propagation (BP) learning. However, the pursuit on optimal network structures remains largely an art of trial-and-error. This prompts some urgency to explore an architecture engineering process, collectively known as “Neural Architecture Search” (NAS). In general, NAS is a design software system for automating the search of effective neural architecture. This paper proposes a X-learning NAS (XNAS) to automatically train network’s structure and parameters. Our theoretical footing is built upon the subspace and correlation analyses between the input layer, hidden layer, and output layer. The design strategy hinges upon the underlying principle that the network should be coerced to learn how to structurally improve the input/output correlation successively (i.e. layer by layer). It embraces both PNAS and RNAS, short for Progressive and Regressive NAS respectively. For unsupervised RNAS, PCA (Principal Component Analysis) is a classic tool for subspace analyses. By further incorporating teacher’s guidance, PCA can be extended to Regression Component Analysis (RCA) to facilitate supervised NAS design. This allows the machine to extract components most critical to the targeted learning objective. We shall further extend the subspace analysis from MLPs to CNNs, via introduction of Convolutional-PCA (CPCA) or, more simply, Deep-PCA (DPCA). The supervised variant of DPCA will be named Deep-RCA (DRCA). The subspace analyses allow us to compute optimal eigenvectors (resp. eigen-filters) and principal components (resp. eigen-channels) for optimal NAS design of MLPs (resp. CNNs). Based on the theoretical analysis, a X-Learning paradigm is developed to jointly learn the structure and parameters of learning models. The objective is to reduce the network complexity while retaining (and sometimes improving) the performance. With carefully pre-selected baseline models, X-Learning has shown great successes in numerous classification-type and/or regression-type applications. We have applied X-learning to the ImageNet datasets for classification and div2k for image enhancements. By applying X-Learning to two types of baseline models, MobileNet and ResNet, both the low-power and high-performance application categories can be supported. Our X-learning experiments include the ImageNet datasets for classification and div2k for image enhancements. Our simulations confirm that X-learning is by and large very competitive relative to the state-of-the-arts approaches.},
note = {Just Accepted},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
AL-Ghamdi, Abdullah S. AL-Malaise; Ragab, Mahmoud; AlGhamdi, Saad Abdulla; Asseri, Amer H.; Mansour, Romany F.; Koundal, Deepika
Cognitive Computing Paradigms for Medical Big Data Processing and its Trends Journal Article
In: Computational Intelligence and Neuroscience, 2022.
@article{GhamdiCCP2022,
title = {Cognitive Computing Paradigms for Medical Big Data Processing and its Trends},
author = {Abdullah S. AL-Malaise AL-Ghamdi and Mahmoud Ragab and Saad Abdulla AlGhamdi and Amer H. Asseri and Romany F. Mansour and Deepika Koundal},
url = {https://www.hindawi.com/journals/cin/2022/3500552/},
year = {2022},
date = {2022-04-30},
urldate = {2022-04-30},
journal = {Computational Intelligence and Neuroscience},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Xiao, Anqi; Shen, Biluo; Shi, Xiaojing; Zhang, Zhe; Zhang, Zeyu; Tian, Jie; Ji, Nan; Hu, Zhenhua
Intraoperative Glioma Grading Using Neural Architecture Search and Multi-modal Imaging Journal Article
In: IEEE Engineering in Medicine and Biology Society, 2022.
@article{Xiao2022,
title = {Intraoperative Glioma Grading Using Neural Architecture Search and Multi-modal Imaging},
author = {Anqi Xiao and Biluo Shen and Xiaojing Shi and Zhe Zhang and Zeyu Zhang and Jie Tian and Nan Ji and Zhenhua Hu},
url = {https://pubmed.ncbi.nlm.nih.gov/35404810/},
year = {2022},
date = {2022-04-11},
urldate = {2022-04-11},
journal = {IEEE Engineering in Medicine and Biology Society},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Spurlock, Kyle; Elgazzar, Heba
A genetic mixed-integer optimization of neural network hyper-parameters Journal Article
In: The Journal of Supercomputing, 2022.
@article{Kyle2022,
title = {A genetic mixed-integer optimization of neural network hyper-parameters},
author = {Spurlock, Kyle and Elgazzar, Heba},
url = {https://doi.org/10.1007/s11227-022-04475-7},
doi = {10.1007/s11227-022-04475-7},
year = {2022},
date = {2022-04-07},
urldate = {2022-04-07},
journal = {The Journal of Supercomputing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
N., Sasikaladevi; A., Revathi
Neural Architecture Search Network for the Diagnosis of COVID From the Radiographic Images Journal Article
In: Applications of Computational Science in Artificial Intelligence, 2022.
@article{Sasikaladevi2022,
title = {Neural Architecture Search Network for the Diagnosis of COVID From the Radiographic Images},
author = {N., Sasikaladevi and A., Revathi},
url = {https://www.igi-global.com/chapter/neural-architecture-search-network-for-the-diagnosis-of-covid-from-the-radiographic-images/302062},
year = {2022},
date = {2022-04-01},
urldate = {2022-04-01},
journal = {Applications of Computational Science in Artificial Intelligence},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Chen, Luoyu
A Progressive and Joint Method for Micro and Macro Architecture Search Journal Article
In: Journal of Physics: Conference Series, vol. 2253, no. 1, pp. 012019, 2022.
@article{Chen_2022,
  title     = {A Progressive and Joint Method for Micro and Macro Architecture Search},
  author    = {Luoyu Chen},
  url       = {https://doi.org/10.1088/1742-6596/2253/1/012019},
  doi       = {10.1088/1742-6596/2253/1/012019},
  year      = {2022},
  date      = {2022-04-01},
  urldate   = {2022-04-01},
  journal   = {Journal of Physics: Conference Series},
  volume    = {2253},
  number    = {1},
  pages     = {012019},
  publisher = {IOP Publishing},
  abstract  = {Recently, neural architecture search (NAS) has achieved great success in design neural architectures automatically. Differentiable architecture search (DARTS) has succeeded in reducing computational cost and making the searching process efficient. Based on the cell-based search space in DARTS, we extend the search space by searching diverse cells at different stages and various connection between cells. We also use a progressive and joint method to search the micro and macro architecture together. Extensive experiments on CIFAR-10 demostrate that we can obtain a better result than the original search space.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Yang, Ying; Zhang, Xu; Pan, Hu
Path-Based Computation Encoder for Neural Architecture Search Journal Article
In: Journal of Information Processing Systems, vol. 18, 2022.
@article{YangPBCE2022,
title = {Path-Based Computation Encoder for Neural Architecture Search},
author = {Ying Yang and Xu Zhang and Hu Pan},
url = {http://jips-k.org/digital-library/2022/18/2/188},
year = {2022},
date = {2022-04-01},
urldate = {2022-04-01},
journal = {Journal of Information Processing Systems},
volume = {18},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Du, Yuxuan; Huang, Tao; You, Shan; Hsieh, Min-Hsiu; Tao, Dacheng
Quantum circuit architecture search for variational quantum algorithms Journal Article
In: npj Quantum Information, 2022.
@article{DuNPJQI2022,
title = {Quantum circuit architecture search for variational quantum algorithms},
author = {Yuxuan Du and Tao Huang and Shan You and Min-Hsiu Hsieh and Dacheng Tao},
url = {https://www.nature.com/articles/s41534-022-00570-y},
year = {2022},
date = {2022-03-23},
urldate = {2022-03-23},
journal = {npj Quantum Information},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Meng, Hang; Li, Fei; Yang, Zhen; Zhu, Qiang; Fan, Chaogang; Zhan, Shu
Salient Patch Based NAS for Grading of Colorectal Cancer Histology Images Proceedings Article
In: The 13th International Multi-Conference on Complexity, Informatics and Cybernetics: IMCIC 2022, 2022.
@inproceedings{MengIMCIC2022,
title = {Salient Patch Based NAS for Grading of Colorectal Cancer Histology Images},
author = {Hang Meng and Fei Li and Zhen Yang and Qiang Zhu and Chaogang Fan and Shu Zhan},
url = {https://www.iiis.org/DOI2022/ZA922IZ/#/},
isbn = {978-1-950492-61-9},
year = {2022},
date = {2022-03-08},
urldate = {2022-03-08},
booktitle = {The 13th International Multi-Conference on Complexity, Informatics and Cybernetics: IMCIC 2022},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wang, Yong; Qiao, Xiaobin; Wang, Gai-Ge
Architecture evolution of convolutional neural network using monarch butterfly optimization Journal Article
In: Journal of Ambient Intelligence and Humanized Computing, 2022.
@article{Wang2022,
title = {Architecture evolution of convolutional neural network using monarch butterfly optimization},
author = {Yong Wang and Xiaobin Qiao and Gai-Ge Wang},
url = {https://link.springer.com/article/10.1007/s12652-022-03766-4},
year = {2022},
date = {2022-03-01},
urldate = {2022-03-01},
journal = {Journal of Ambient Intelligence and Humanized Computing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Cai, Han; Lin, Ji; Lin, Yujun; Liu, Zhijian; Tang, Haotian; Wang, Hanrui; Zhu, Ligeng; Han, Song
Enable Deep Learning on Mobile Devices: Methods, Systems, and Applications Journal Article
In: ACM Trans. Des. Autom. Electron. Syst., vol. 27, no. 3, 2022, ISSN: 1084-4309.
@article{10.1145/3486618,
  title     = {Enable Deep Learning on Mobile Devices: Methods, Systems, and Applications},
  author    = {Han Cai and Ji Lin and Yujun Lin and Zhijian Liu and Haotian Tang and Hanrui Wang and Ligeng Zhu and Song Han},
  url       = {https://doi.org/10.1145/3486618},
  doi       = {10.1145/3486618},
  issn      = {1084-4309},
  year      = {2022},
  date      = {2022-03-01},
  urldate   = {2022-03-01},
  journal   = {ACM Trans. Des. Autom. Electron. Syst.},
  volume    = {27},
  number    = {3},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  abstract  = {Deep neural networks (DNNs) have achieved unprecedented success in the field of artificial intelligence (AI), including computer vision, natural language processing, and speech recognition. However, their superior performance comes at the considerable cost of computational complexity, which greatly hinders their applications in many resource-constrained devices, such as mobile phones and Internet of Things (IoT) devices. Therefore, methods and techniques that are able to lift the efficiency bottleneck while preserving the high accuracy of DNNs are in great demand to enable numerous edge AI applications. This article provides an overview of efficient deep learning methods, systems, and applications. We start from introducing popular model compression methods, including pruning, factorization, quantization, as well as compact model design. To reduce the large design cost of these manual solutions, we discuss the AutoML framework for each of them, such as neural architecture search (NAS) and automated pruning and quantization. We then cover efficient on-device training to enable user customization based on the local data on mobile devices. Apart from general acceleration techniques, we also showcase several task-specific accelerations for point cloud, video, and natural language processing by exploiting their spatial sparsity and temporal/token redundancy. Finally, to support all these algorithmic advancements, we introduce the efficient deep learning system design from both software and hardware perspectives.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Ding, Yadong; Wu, Yu; Huang, Chengyue; Tang, Siliang; Yang, Yi; Wei, Longhui; Zhuang, Yueting; Tian, Qi
Learning to Learn by Jointly Optimizing Neural Architecture and Weights Proceedings Article
In: CVPR2022, 2022.
@inproceedings{DingCVPR2022,
title = {Learning to Learn by Jointly Optimizing Neural Architecture and Weights},
author = {Yadong Ding and Yu Wu and Chengyue Huang and Siliang Tang and Yi Yang and Longhui Wei and Yueting Zhuang and Qi Tian},
url = {https://yu-wu.net/pdf/CVPR22_CAML.pdf},
year = {2022},
date = {2022-03-01},
urldate = {2022-03-01},
booktitle = {CVPR2022},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Xu, Sean; Li, Yihui; Zhang, Yice; Xu, Ruifeng; Li, Jianxin; Shi, Guozhong; Hu, Feiran
Contrastive Learning for Multiple Models in One Supernet Proceedings Article
In: Cognitive Computing – ICCC 2021, 2022.
@inproceedings{Xu2022,
title = {Contrastive Learning for Multiple Models in One Supernet},
author = {Sean Xu and Yihui Li and Yice Zhang and Ruifeng Xu and Jianxin Li and Guozhong Shi and Feiran Hu},
url = {https://rd.springer.com/chapter/10.1007/978-3-030-96419-1_2},
year = {2022},
date = {2022-02-10},
urldate = {2022-02-10},
booktitle = {Cognitive Computing – ICCC 2021},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Jia, Liang; Tian, Ye; Zhang, Junguo
Compression of Deep Learning Models for Resource-Constrained Devices Journal Article
In: Computational Intelligence and Neuroscience, vol. 2022, 2022.
@article{Jia2022,
  title     = {Compression of Deep Learning Models for Resource-Constrained Devices},
  author    = {Liang Jia and Ye Tian and Junguo Zhang},
  url       = {https://www.hindawi.com/journals/cin/2022/8615374/},
  year      = {2022},
  date      = {2022-02-02},
  urldate   = {2022-02-02},
  journal   = {Computational Intelligence and Neuroscience},
  volume    = {2022},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Guo, Hongwei; Zhuang, Xiaoying; Chen, Pengwan; Alajlan, Naif; Rabczuk, Timon
Stochastic deep collocation method based on neural architecture search and transfer learning for heterogeneous porous media Journal Article
In: Engineering with Computers , 2022.
@article{GuoEC2022,
title = {Stochastic deep collocation method based on neural architecture search and transfer learning for heterogeneous porous media},
author = {Hongwei Guo and Xiaoying Zhuang and Pengwan Chen and Naif Alajlan and Timon Rabczuk},
url = {https://doi.org/10.1007/s00366-021-01586-2},
doi = {10.1007/s00366-021-01586-2},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
journal = {Engineering with Computers},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Yao, Xiao; Li, Fang; Zeng, Yifeng
Relational structure predictive neural architecture search for multimodal fusion Journal Article
In: Soft Computing, 2022.
@article{Yao2022,
title = {Relational structure predictive neural architecture search for multimodal fusion},
author = {Xiao Yao and Fang Li and Yifeng Zeng},
url = {https://link.springer.com/article/10.1007/s00500-022-06772-y},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
journal = {Soft Computing},
keywords = {},
pubstate = {published},
tppubtype = {article}
}