@ARTICLE{XAQZ14,
author = {Xu, Shuo and An, Xin and Qiao, Xiaodong and Zhu, Lijun},
title = {Multi-Task Least-Squares Support Vector Machines},
journal = {Multimedia Tools and Applications},
year = {2014},
volume = {71},
number = {2},
pages = {699--715},
issn = {1380-7501},
abstract = {There is often underlying cross relatedness amongst multiple
tasks, which traditional single-task learning methods simply discard.
Since multi-task learning can exploit this relatedness to further improve
performance, it has attracted extensive attention in many domains,
including multimedia. It has been shown through a meticulous empirical
study that the generalization performance of the Least-Squares Support
Vector Machine (LS-SVM) is comparable to that of the SVM. In order to
generalize the LS-SVM from single-task to multi-task learning, and
inspired by regularized multi-task learning (RMTL), this study proposes
a novel multi-task learning approach, the multi-task LS-SVM (MTLS-SVM).
As with the LS-SVM, only a convex linear system has to be solved in the
training phase. Moreover, we unify the classification and regression
problems in an efficient training algorithm that effectively employs
Krylov methods. Finally, experimental results on the \emph{school} and
\emph{dermatology} datasets validate the effectiveness of the proposed
approach.},
doi = {10.1007/s11042-013-1526-5},
keywords = {Multi-Task Learning; Least-Squares Support Vector Machine (LS-SVM);
Multi-Task LS-SVM (MTLS-SVM); Krylov Methods},
}
Full text: XAQZ14.pdf
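
To get a feel for the Krylov-based training the abstract refers to, here is a minimal, self-contained Python sketch of the single-task LS-SVM case: fitting reduces to one symmetric positive-definite linear system, which is solved with plain conjugate gradient (a Krylov method). This is my own illustration, not the MTLS-SVM algorithm from the paper; the RBF kernel, the bias-free formulation, and all parameter values (sigma, gamma, the CG tolerance) are assumptions chosen for brevity.

# Sketch only: single-task, bias-free LS-SVM regression trained by solving
# (K + I/gamma) alpha = y with conjugate gradient. Not the paper's MTLS-SVM.
import numpy as np

def rbf_kernel(X, Z, sigma=1.0):
    # Gaussian (RBF) kernel matrix between the rows of X and the rows of Z.
    d2 = ((X[:, None, :] - Z[None, :, :]) ** 2).sum(-1)
    return np.exp(-d2 / (2.0 * sigma ** 2))

def conjugate_gradient(A, b, tol=1e-8, max_iter=1000):
    # Solve A x = b for symmetric positive-definite A (a Krylov method).
    x = np.zeros_like(b)
    r = b - A @ x
    p = r.copy()
    rs = r @ r
    for _ in range(max_iter):
        Ap = A @ p
        step = rs / (p @ Ap)
        x += step * p
        r -= step * Ap
        rs_new = r @ r
        if np.sqrt(rs_new) < tol:
            break
        p = r + (rs_new / rs) * p
        rs = rs_new
    return x

def fit_lssvm(X, y, gamma=1.0, sigma=1.0):
    # Bias omitted for brevity, so the system matrix K + I/gamma is SPD
    # and conjugate gradient applies directly.
    K = rbf_kernel(X, X, sigma)
    A = K + np.eye(len(y)) / gamma
    return conjugate_gradient(A, y)

def predict_lssvm(X_train, alpha, X_new, sigma=1.0):
    # Prediction is a kernel expansion over the training points.
    return rbf_kernel(X_new, X_train, sigma) @ alpha

# Tiny usage example on synthetic data (values are arbitrary).
rng = np.random.default_rng(0)
X = rng.normal(size=(50, 3))
y = np.sin(X[:, 0]) + 0.1 * rng.normal(size=50)
alpha = fit_lssvm(X, y, gamma=10.0, sigma=1.0)
print(predict_lssvm(X, alpha, X[:5], sigma=1.0))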