@inproceedings{e450e89f36c242de81193617bbf17411,
  title     = {Multitask Learning Using Regularized Multiple Kernel Learning},
  abstract  = {Empirical success of kernel-based learning algorithms is very much dependent on the kernel function used. Instead of using a single fixed kernel function, multiple kernel learning (MKL) algorithms learn a combination of different kernel functions in order to obtain a similarity measure that better matches the underlying problem. We study multitask learning (MTL) problems and formulate a novel MTL algorithm that trains coupled but nonidentical MKL models across the tasks. The proposed algorithm is especially useful for tasks that have different input and/or output space characteristics and is computationally very efficient. Empirical results on three data sets validate the generalization performance and the efficiency of our approach.},
  keywords  = {kernel machines, multilabel learning, multiple kernel learning, multitask learning, support vector machines},
  author    = {G{\"o}nen, Mehmet and Kandemir, Melih and Kaski, Samuel},
  year      = {2011},
  doi       = {10.1007/978-3-642-24958-7_58},
  language  = {English (US)},
  isbn      = {9783642249570},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  number    = {PART 2},
  pages     = {500--509},
  booktitle = {Neural Information Processing - 18th International Conference, {ICONIP} 2011, Proceedings},
  edition   = {PART 2},
  note      = {18th International Conference on Neural Information Processing, ICONIP 2011 ; Conference date: 13-11-2011 Through 17-11-2011},
}