Support Vector Machines (SVMs) are well-established
Machine Learning (ML) algorithms. They rely on the fact
that i) linear learning can be formalised as a
well-posed optimisation problem; ii) nonlinear learning
can be brought into linear learning thanks to the
kernel trick and the mapping of the initial search
space onto a high dimensional feature space. The kernel
is designed by the ML expert and it governs the
efficiency of the SVM approach. In this paper, a new
approach for the automatic design of kernels by Genetic
Programming, called the Evolutionary Kernel Machine
(EKM), is presented. EKM combines a well-founded
fitness function inspired from the margin criterion,
and a co-evolution framework ensuring the computational
scalability of the approach. Empirical validation on
standard ML benchmark demonstrates that EKM is
competitive using state-of-the-art SVMs with tuned
hyper-parameters.
PPSN-IX
evolved Kernels are forced to be symmetric functions.
Mercer's condition not enforced, but evolved. 3
co-evolving populations. runtime < 1 hour. Size based
parsimony pressure. Comparison with k-nearest
neighbours (k-NN) and SVM, GK-SVM (both with somewhat
optimised parameters). 6 undemanding UCI benchmarks.
%0 Conference Paper
%1 Gagne:PPSN:2006
%A Gagne, Christian
%A Schoenauer, Marc
%A Sebag, Michele
%A Tomassini, Marco
%B Parallel Problem Solving from Nature - PPSN IX
%C Reykjavik, Iceland
%D 2006
%E Runarsson, Thomas Philip
%E Beyer, Hans-Georg
%E Burke, Edmund
%E Merelo-Guervos, Juan J.
%E Whitley, L. Darrell
%E Yao, Xin
%I Springer-Verlag
%K DSS, algorithms, beagle coevolution, genetic hyperheuristic, open programming,
%P 1008--1017
%R 10.1007/11844297_102
%T Genetic Programming for Kernel-Based Learning with
Co-evolving Subsets Selection
%U http://ppsn2006.raunvis.hi.is/proceedings/287.pdf
%V 4193
%X Support Vector Machines (SVMs) are well-established
Machine Learning (ML) algorithms. They rely on the fact
that i) linear learning can be formalised as a
well-posed optimisation problem; ii) nonlinear learning
can be brought into linear learning thanks to the
kernel trick and the mapping of the initial search
space onto a high dimensional feature space. The kernel
is designed by the ML expert and it governs the
efficiency of the SVM approach. In this paper, a new
approach for the automatic design of kernels by Genetic
Programming, called the Evolutionary Kernel Machine
(EKM), is presented. EKM combines a well-founded
fitness function inspired from the margin criterion,
and a co-evolution framework ensuring the computational
scalability of the approach. Empirical validation on
standard ML benchmark demonstrates that EKM is
competitive using state-of-the-art SVMs with tuned
hyper-parameters.
%@ 3-540-38990-3
@inproceedings{Gagne:PPSN:2006,
  abstract = {Support Vector Machines (SVMs) are well-established
Machine Learning (ML) algorithms. They rely on the fact
that i) linear learning can be formalised as a
well-posed optimisation problem; ii) nonlinear learning
can be brought into linear learning thanks to the
kernel trick and the mapping of the initial search
space onto a high dimensional feature space. The kernel
is designed by the ML expert and it governs the
efficiency of the SVM approach. In this paper, a new
approach for the automatic design of kernels by Genetic
Programming, called the Evolutionary Kernel Machine
(EKM), is presented. EKM combines a well-founded
fitness function inspired from the margin criterion,
and a co-evolution framework ensuring the computational
scalability of the approach. Empirical validation on
standard ML benchmark demonstrates that EKM is
competitive using state-of-the-art SVMs with tuned
hyper-parameters.},
  added-at = {2008-06-19T17:35:00.000+0200},
  address = {Reykjavik, Iceland},
  author = {Gagn{\'e}, Christian and Schoenauer, Marc and Sebag, Mich{\`e}le and Tomassini, Marco},
  biburl = {https://www.bibsonomy.org/bibtex/2eef5a5cc1ab76a498f137868094afb73/brazovayeye},
  booktitle = {Parallel Problem Solving from Nature - PPSN IX},
  doi = {10.1007/11844297_102},
  editor = {Runarsson, Thomas Philip and Beyer, Hans-Georg and Burke, Edmund and Merelo-Guerv{\'o}s, Juan J. and Whitley, L. Darrell and Yao, Xin},
  interhash = {f7ca82b6276c6ca7ab1a4c436ec168af},
  intrahash = {eef5a5cc1ab76a498f137868094afb73},
  isbn = {3-540-38990-3},
  keywords = {DSS, algorithms, beagle coevolution, genetic hyperheuristic, open programming},
  month = "9-13 " # sep,
  notes = {PPSN-IX
evolved Kernels are forced to be symmetric functions.
Mercer's condition not enforced, but evolved. 3
co-evolving populations. runtime < 1 hour. Size based
parsimony pressure. Comparison with k-nearest
neighbours (k-NN) and SVM, GK-SVM (both with somewhat
optimised parameters). 6 undemanding UCI benchmarks.},
  pages = {1008--1017},
  publisher = {Springer-Verlag},
  publisher_address = {Berlin},
  series = {LNCS},
  size = {10 pages},
  timestamp = {2008-06-19T17:40:01.000+0200},
  title = {Genetic Programming for Kernel-Based Learning with
Co-evolving Subsets Selection},
  url = {http://ppsn2006.raunvis.hi.is/proceedings/287.pdf},
  volume = 4193,
  year = 2006
}