Multiple kernel learning (MKL) is based on convex combinations of arbitrary kernels over potentially different domains.
\[{\bf k}(x_i, x_j) = \sum_{k=1}^{K} \beta_k\, {\bf k}_k(x_i, x_j)\] where \(\beta_k > 0\), \(\sum_{k=1}^{K} \beta_k = 1\), \(K\) is the number of sub-kernels, \({\bf k}\) is the combined kernel, \({\bf k}_k\) is an individual kernel and \(\{x_i\}_{i}\) are the training data.
Regression is done by using CSVRLight. See Support Vector Regression for more details.
See [SRatschSchaferScholkopf06] for more information about MKL.
Imagine we have files with training and test data. We create CDenseFeatures (here 64 bit floats aka RealFeatures) and CRegressionLabels as
# Load train/test matrices as dense 64-bit float features and their regression targets.
features_train = RealFeatures(f_feats_train)
features_test = RealFeatures(f_feats_test)
labels_train = RegressionLabels(f_labels_train)
labels_test = RegressionLabels(f_labels_test)
% Load train/test matrices as dense 64-bit float features and their regression targets.
features_train = RealFeatures(f_feats_train);
features_test = RealFeatures(f_feats_test);
labels_train = RegressionLabels(f_labels_train);
labels_test = RegressionLabels(f_labels_test);
// Load train/test matrices as dense 64-bit float features and their regression targets.
RealFeatures features_train = new RealFeatures(f_feats_train);
RealFeatures features_test = new RealFeatures(f_feats_test);
RegressionLabels labels_train = new RegressionLabels(f_labels_train);
RegressionLabels labels_test = new RegressionLabels(f_labels_test);
# Load train/test matrices as dense 64-bit float features and their regression targets.
features_train = Modshogun::RealFeatures.new f_feats_train
features_test = Modshogun::RealFeatures.new f_feats_test
labels_train = Modshogun::RegressionLabels.new f_labels_train
labels_test = Modshogun::RegressionLabels.new f_labels_test
# Load train/test matrices as dense 64-bit float features and their regression targets.
features_train <- RealFeatures(f_feats_train)
features_test <- RealFeatures(f_feats_test)
labels_train <- RegressionLabels(f_labels_train)
labels_test <- RegressionLabels(f_labels_test)
// Load train/test matrices as dense 64-bit float features and their regression targets.
RealFeatures features_train = new RealFeatures(f_feats_train);
RealFeatures features_test = new RealFeatures(f_feats_test);
RegressionLabels labels_train = new RegressionLabels(f_labels_train);
RegressionLabels labels_test = new RegressionLabels(f_labels_test);
// Load train/test matrices as dense 64-bit float features and their regression targets.
// some<T>() wraps the object in Shogun's ref-counted smart pointer.
auto features_train = some<CDenseFeatures<float64_t>>(f_feats_train);
auto features_test = some<CDenseFeatures<float64_t>>(f_feats_test);
auto labels_train = some<CRegressionLabels>(f_labels_train);
auto labels_test = some<CRegressionLabels>(f_labels_test);
Then we create individual kernels like CPolyKernel and CGaussianKernel which will be later combined in one CCombinedKernel.
# One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
# PolyKernel(10, 2): presumably cache size 10 and degree 2 — confirm against CPolyKernel docs.
poly_kernel = PolyKernel(10, 2)
gauss_kernel_1 = GaussianKernel(2.0)
gauss_kernel_2 = GaussianKernel(3.0)
% One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
poly_kernel = PolyKernel(10, 2);
gauss_kernel_1 = GaussianKernel(2.0);
gauss_kernel_2 = GaussianKernel(3.0);
// One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
PolyKernel poly_kernel = new PolyKernel(10, 2);
GaussianKernel gauss_kernel_1 = new GaussianKernel(2.0);
GaussianKernel gauss_kernel_2 = new GaussianKernel(3.0);
# One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
poly_kernel = Modshogun::PolyKernel.new 10, 2
gauss_kernel_1 = Modshogun::GaussianKernel.new 2.0
gauss_kernel_2 = Modshogun::GaussianKernel.new 3.0
# One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
poly_kernel <- PolyKernel(10, 2)
gauss_kernel_1 <- GaussianKernel(2.0)
gauss_kernel_2 <- GaussianKernel(3.0)
// One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
PolyKernel poly_kernel = new PolyKernel(10, 2);
GaussianKernel gauss_kernel_1 = new GaussianKernel(2.0);
GaussianKernel gauss_kernel_2 = new GaussianKernel(3.0);
// One polynomial sub-kernel and two Gaussian sub-kernels with different widths.
// CPolyKernel(10, 2): presumably cache size 10 and degree 2 — confirm against CPolyKernel docs.
auto poly_kernel = some<CPolyKernel>(10, 2);
auto gauss_kernel_1 = some<CGaussianKernel>(2.0);
auto gauss_kernel_2 = some<CGaussianKernel>(3.0);
We create an instance of CCombinedKernel and append the CKernel objects.
# Append all sub-kernels to one CombinedKernel and initialise it on the training data
# (both sides are the training features, i.e. the train-vs-train kernel matrix).
combined_kernel = CombinedKernel()
combined_kernel.append_kernel(poly_kernel)
combined_kernel.append_kernel(gauss_kernel_1)
combined_kernel.append_kernel(gauss_kernel_2)
combined_kernel.init(features_train, features_train)
% Append all sub-kernels to one CombinedKernel and initialise it on the training data.
combined_kernel = CombinedKernel();
combined_kernel.append_kernel(poly_kernel);
combined_kernel.append_kernel(gauss_kernel_1);
combined_kernel.append_kernel(gauss_kernel_2);
combined_kernel.init(features_train, features_train);
// Append all sub-kernels to one CombinedKernel and initialise it on the training data.
CombinedKernel combined_kernel = new CombinedKernel();
combined_kernel.append_kernel(poly_kernel);
combined_kernel.append_kernel(gauss_kernel_1);
combined_kernel.append_kernel(gauss_kernel_2);
combined_kernel.init(features_train, features_train);
# Append all sub-kernels to one CombinedKernel and initialise it on the training data.
combined_kernel = Modshogun::CombinedKernel.new
combined_kernel.append_kernel poly_kernel
combined_kernel.append_kernel gauss_kernel_1
combined_kernel.append_kernel gauss_kernel_2
combined_kernel.init features_train, features_train
# Append all sub-kernels to one CombinedKernel and initialise it on the training data.
combined_kernel <- CombinedKernel()
combined_kernel$append_kernel(poly_kernel)
combined_kernel$append_kernel(gauss_kernel_1)
combined_kernel$append_kernel(gauss_kernel_2)
combined_kernel$init(features_train, features_train)
// Append all sub-kernels to one CombinedKernel and initialise it on the training data.
CombinedKernel combined_kernel = new CombinedKernel();
combined_kernel.append_kernel(poly_kernel);
combined_kernel.append_kernel(gauss_kernel_1);
combined_kernel.append_kernel(gauss_kernel_2);
combined_kernel.init(features_train, features_train);
// Append all sub-kernels to one CCombinedKernel and initialise it on the training data
// (both sides are the training features, i.e. the train-vs-train kernel matrix).
auto combined_kernel = some<CCombinedKernel>();
combined_kernel->append_kernel(poly_kernel);
combined_kernel->append_kernel(gauss_kernel_1);
combined_kernel->append_kernel(gauss_kernel_2);
combined_kernel->init(features_train, features_train);
CMKLRegression needs an SVM solver as input. We here use CSVRLight. We create an instance of CSVRLight and CMKLRegression, provide the combined kernel and labels before training it.
# MKL regression wraps an SVR solver (SVRLight); attach the combined kernel
# and training labels, then train to learn both alphas and sub-kernel weights.
binary_svm_solver = SVRLight()
mkl = MKLRegression(binary_svm_solver)
mkl.set_kernel(combined_kernel)
mkl.set_labels(labels_train)
mkl.train()
% MKL regression wraps an SVR solver (SVRLight); attach kernel and labels, then train.
binary_svm_solver = SVRLight();
mkl = MKLRegression(binary_svm_solver);
mkl.set_kernel(combined_kernel);
mkl.set_labels(labels_train);
mkl.train();
// MKL regression wraps an SVR solver (SVRLight); attach kernel and labels, then train.
SVRLight binary_svm_solver = new SVRLight();
MKLRegression mkl = new MKLRegression(binary_svm_solver);
mkl.set_kernel(combined_kernel);
mkl.set_labels(labels_train);
mkl.train();
# MKL regression wraps an SVR solver (SVRLight); attach kernel and labels, then train.
binary_svm_solver = Modshogun::SVRLight.new
mkl = Modshogun::MKLRegression.new binary_svm_solver
mkl.set_kernel combined_kernel
mkl.set_labels labels_train
mkl.train
# MKL regression wraps an SVR solver (SVRLight); attach kernel and labels, then train.
binary_svm_solver <- SVRLight()
mkl <- MKLRegression(binary_svm_solver)
mkl$set_kernel(combined_kernel)
mkl$set_labels(labels_train)
mkl$train()
// MKL regression wraps an SVR solver (SVRLight); attach kernel and labels, then train.
SVRLight binary_svm_solver = new SVRLight();
MKLRegression mkl = new MKLRegression(binary_svm_solver);
mkl.set_kernel(combined_kernel);
mkl.set_labels(labels_train);
mkl.train();
// MKL regression wraps an SVR solver (CSVRLight); attach the combined kernel
// and training labels, then train to learn both alphas and sub-kernel weights.
auto binary_svm_solver = some<CSVRLight>();
auto mkl = some<CMKLRegression>(binary_svm_solver);
mkl->set_kernel(combined_kernel);
mkl->set_labels(labels_train);
mkl->train();
After training, we can extract \(\beta\) and SVM coefficients \(\alpha\).
# Learned sub-kernel weights (beta) and SVM coefficients (alpha).
beta = combined_kernel.get_subkernel_weights()
alpha = mkl.get_alphas()
% Learned sub-kernel weights (beta) and SVM coefficients (alpha).
beta = combined_kernel.get_subkernel_weights();
alpha = mkl.get_alphas();
// Learned sub-kernel weights (beta) and SVM coefficients (alpha).
DoubleMatrix beta = combined_kernel.get_subkernel_weights();
DoubleMatrix alpha = mkl.get_alphas();
# Learned sub-kernel weights (beta) and SVM coefficients (alpha).
beta = combined_kernel.get_subkernel_weights
alpha = mkl.get_alphas
# Learned sub-kernel weights (beta) and SVM coefficients (alpha).
beta <- combined_kernel$get_subkernel_weights()
alpha <- mkl$get_alphas()
// Learned sub-kernel weights (beta) and SVM coefficients (alpha).
double[] beta = combined_kernel.get_subkernel_weights();
double[] alpha = mkl.get_alphas();
// Learned sub-kernel weights (beta) and SVM coefficients (alpha).
auto beta = combined_kernel->get_subkernel_weights();
auto alpha = mkl->get_alphas();
We set the updated kernel and predict CRegressionLabels for test data.
# Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel.init(features_train, features_test)
labels_predict = mkl.apply_regression()
% Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel.init(features_train, features_test);
labels_predict = mkl.apply_regression();
// Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel.init(features_train, features_test);
RegressionLabels labels_predict = mkl.apply_regression();
# Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel.init features_train, features_test
labels_predict = mkl.apply_regression
# Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel$init(features_train, features_test)
labels_predict <- mkl$apply_regression()
// Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel.init(features_train, features_test);
RegressionLabels labels_predict = mkl.apply_regression();
// Re-initialise the kernel on (train, test) so the trained machine can score test points.
combined_kernel->init(features_train, features_test);
auto labels_predict = mkl->apply_regression();
Finally, we can evaluate the CMeanSquaredError.
# Mean squared error between predictions and ground-truth test labels.
error = MeanSquaredError()
mse = error.evaluate(labels_predict, labels_test)
% Mean squared error between predictions and ground-truth test labels.
error = MeanSquaredError();
mse = error.evaluate(labels_predict, labels_test);
// Mean squared error between predictions and ground-truth test labels.
MeanSquaredError error = new MeanSquaredError();
double mse = error.evaluate(labels_predict, labels_test);
# Mean squared error between predictions and ground-truth test labels.
error = Modshogun::MeanSquaredError.new
mse = error.evaluate labels_predict, labels_test
# Mean squared error between predictions and ground-truth test labels.
error <- MeanSquaredError()
mse <- error$evaluate(labels_predict, labels_test)
// Mean squared error between predictions and ground-truth test labels.
MeanSquaredError error = new MeanSquaredError();
double mse = error.evaluate(labels_predict, labels_test);
// Mean squared error between predictions and ground-truth test labels.
auto error = some<CMeanSquaredError>();
auto mse = error->evaluate(labels_predict, labels_test);
Wikipedia: Multiple_kernel_learning
[SRatschSchaferScholkopf06] | S. Sonnenburg, G. Rätsch, C. Schäfer, and B. Schölkopf. Large scale multiple kernel learning. The Journal of Machine Learning Research, 7:1531–1565, 2006. |