Merge branch 'devel' of git://gitorious.org/mldemos/mldemos into devel
[mldemos:mldemos.git] / _AlgorithmsPlugins / KernelMethods / regressorKRLS.cpp
/*********************************************************************
MLDemos: A User-Friendly visualization toolkit for machine learning
Copyright (C) 2010  Basilio Noris
Contact: mldemos@b4silio.com

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Library General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free
Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*********************************************************************/
#include <public.h>
#include "regressorKRLS.h"
#include <cfloat> // DBL_MAX (may already come in via public.h)
#include <cmath>  // fabs

using namespace std;

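// KRLS regressor plugin: wraps dlib's krls (kernel recursive least squares)
// online trainer with a linear, polynomial or RBF kernel, selected via
// kernelType (0, 1, 2 respectively).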
const char *RegressorKRLS::GetInfoString()
{
    char *text = new char[255];
    sprintf(text, "Kernel Ridge Least Squares\n");
    sprintf(text, "%sCapacity: %d", text, capacity);
    sprintf(text, "%sKernel: ", text);
    switch(kernelType)
    {
    case 0:
        sprintf(text, "%s linear", text);
        break;
    case 1:
        sprintf(text, "%s polynomial (deg: %d width: %f)", text, (int)kernelDegree, kernelParam);
        break;
    case 2:
        sprintf(text, "%s rbf (gamma: %f)", text, kernelParam);
        break;
    }
    sprintf(text, "%seps: %f\n", text, epsilon);
    sprintf(text, "%sBasis Functions: %d\n", text, (int)GetSVs().size());
    return text;
}

RegressorKRLS::~RegressorKRLS()
{
    DEL(linTrainer);
    DEL(polTrainer);
    DEL(rbfTrainer);
}

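// Train: uses the first input dimension as x and the second as the regression
// target (multi-dimensional inputs are not handled yet), shuffles the pairs,
// then rebuilds the dlib::krls trainer for the selected kernel and feeds it
// the samples one at a time. epsilon is the trainer's tolerance, capacity its
// maximum dictionary size (0 means effectively unbounded); for the polynomial
// and RBF kernels the width is passed to dlib as gamma = 1/kernelParam.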
void RegressorKRLS::Train(std::vector< fvec > _samples, ivec _labels)
{
    if(capacity == 1) capacity = 2;
    samples.clear();
    labels.clear();
    if(!_samples.size()) return;
    if(_samples[0].size() > 2) return; // no multi-dim for now...

    FOR(i, _samples.size())
    {
        reg_sample_type samp;
        samp(0) = _samples[i][0];
        samples.push_back(samp);
        labels.push_back(_samples[i][1]);
    }
    randomize_samples(samples, labels);

    DEL(linTrainer);
    DEL(polTrainer);
    DEL(rbfTrainer);
    switch(kernelType)
    {
    case 0:
        {
            linTrainer = new dlib::krls<reg_lin_kernel>(reg_lin_kernel(), epsilon, capacity ? capacity : 1000000);
            FOR(i, samples.size())
            {
                linTrainer->train(samples[i], labels[i]);
            }
            linFunc = linTrainer->get_decision_function();
        }
        break;
    case 1:
        {
            polTrainer = new dlib::krls<reg_pol_kernel>(reg_pol_kernel(1./kernelParam, 0, kernelDegree), epsilon, capacity ? capacity : 1000000);
            FOR(i, samples.size())
            {
                polTrainer->train(samples[i], labels[i]);
            }
            polFunc = polTrainer->get_decision_function();
        }
        break;
    case 2:
        {
            rbfTrainer = new dlib::krls<reg_rbf_kernel>(reg_rbf_kernel(1./kernelParam), epsilon, capacity ? capacity : 1000000);
            FOR(i, samples.size())
            {
                rbfTrainer->train(samples[i], labels[i]);
            }
            rbfFunc = rbfTrainer->get_decision_function();
        }
        break;
    }
}

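// Test: evaluates the trained KRLS model at a single 1D input; the prediction
// is written into component 0 of the returned vector.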
fvec RegressorKRLS::Test( const fvec &_sample )
{
    fvec res;
    res.resize(2,0);
    if(!linTrainer && !polTrainer && !rbfTrainer) return res;
    reg_sample_type sample;
    sample(0) = _sample[0];
    switch(kernelType)
    {
    case 0:
        res[0] = (*linTrainer)(sample);
        break;
    case 1:
        res[0] = (*polTrainer)(sample);
        break;
    case 2:
        res[0] = (*rbfTrainer)(sample);
        break;
    }
    return res;
}

fVec RegressorKRLS::Test( const fVec &_sample )
{
    fVec res;
    if(!linTrainer && !polTrainer && !rbfTrainer) return res; // no model trained yet
    reg_sample_type sample;
    sample(0) = _sample._[0];
    switch(kernelType)
    {
    case 0:
        res[0] = (*linTrainer)(sample);
        break;
    case 1:
        res[0] = (*polTrainer)(sample);
        break;
    case 2:
        res[0] = (*rbfTrainer)(sample);
        break;
    }
    return res;
}

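// GetSVs: returns the basis vectors ("support vectors") kept in the trained
// decision function's dictionary; each entry holds the basis vector's x
// position and, as its second component, the target value of the closest
// training sample.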
std::vector<fvec> RegressorKRLS::GetSVs()
{
    vector<fvec> SVs;
    if(kernelType == 0)
    {
        FOR(i, linFunc.basis_vectors.nr())
        {
            fvec sv;
            sv.resize(2,0);
            sv[0] = linFunc.basis_vectors(i)(0);
            SVs.push_back(sv);
        }
    }
    else if(kernelType == 1)
    {
        FOR(i, polFunc.basis_vectors.nr())
        {
            fvec sv;
            sv.resize(2,0);
            sv[0] = polFunc.basis_vectors(i)(0);
            SVs.push_back(sv);
        }
    }
    else if(kernelType == 2)
    {
        FOR(i, rbfFunc.basis_vectors.nr())
        {
            fvec sv;
            sv.resize(2,0);
            sv[0] = rbfFunc.basis_vectors(i)(0);
            SVs.push_back(sv);
        }
    }

    FOR(i, SVs.size())
    {
        int closest = 0;
        double dist = DBL_MAX;
        FOR(j, samples.size())
        {
            double d = fabs(samples[j](0)-SVs[i][0]); // fabs: keep the distance in floating point
            if(d < dist)
            {
                dist = d;
                closest = j;
            }
        }
        SVs[i][1] = labels[closest];
    }
    return SVs;
}