@inproceedings{8951ec637b9c47bd840867f644bb9916,
title = "Adversarial Augmentation For Adapter Learning",
abstract = "The recent pre-trained models have achieved state-of-the-art results for natural language understanding (NLU) and automatic speech recognition (ASR). However, the pre-trained models likely suffer from the overfitting problem when adapting the model to a low-resource target domain. This study handles this low-resource setting by training an adversarial adapter based on a pre-trained backbone model. The adversarial training is performed by implementing the data augmentation rather than enhancing the adversarial robustness. The proposed method leverages adversarial training to collect augmented data to reinforce adapter learning with a smoothed decision boundary. The size of trainable parameters is tightly controlled to alleviate the overfitting to enhance the model capability. In the experiments, this work considerably improves the performance in NLU tasks. The adversarial adapter learning is further extended for ASR to show the merit of this method in terms of efficiency and accuracy.",
keywords = "adapter learning, adversarial training, data augmentation, fine-tuning, pre-trained model",
author = "Chien, {Jen Tzung} and Sun, {Wei Yu}",
note = "Publisher Copyright: {\textcopyright} 2023 IEEE.; 2023 IEEE Automatic Speech Recognition and Understanding Workshop, ASRU 2023 ; Conference date: 16-12-2023 Through 20-12-2023",
year = "2023",
doi = "10.1109/ASRU57964.2023.10389727",
language = "English",
series = "2023 IEEE Automatic Speech Recognition and Understanding Workshop, ASRU 2023",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
booktitle = "2023 IEEE Automatic Speech Recognition and Understanding Workshop, ASRU 2023",
address = "United States",
}