silence compiler warnings: format string errors, string constant conversions
mldemos.git: _AlgorithmsPlugins/KernelMethods/regressorKRLS.cpp
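The two warning families named in the commit title usually come from the patterns sketched below. This is an illustrative example, not the actual diff of the commit; the diagnostic flags mentioned (e.g. GCC's -Wrestrict and -Wwrite-strings) are typical but not confirmed by the commit itself.

    #include <cstdio>
    #include <cstring>

    int main()
    {
        // 1) format-string warning: the destination buffer is also passed as
        //    the %s source, so the copy overlaps itself (undefined behaviour)
        char text[256];
        sprintf(text, "Kernel: ");
        // sprintf(text, "%s rbf", text);        // warned (e.g. -Wrestrict)
        sprintf(text + strlen(text), " rbf");    // append via an offset instead

        // 2) string-constant conversion: a string literal is not writable,
        //    so it should bind to const char*, not char*
        // char *name = "KRLS";                  // warned (e.g. -Wwrite-strings)
        const char *name = "KRLS";               // silenced
        printf("%s (%s)\n", text, name);
        return 0;
    }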
/*********************************************************************
MLDemos: A User-Friendly visualization toolkit for machine learning
Copyright (C) 2010  Basilio Noris
Contact: mldemos@b4silio.com

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Library General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free
Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*********************************************************************/
#include <public.h>
#include <cmath>     // fabs, used in GetSVs()
#include <cfloat>    // DBL_MAX, used in GetSVs()
#include "regressorKRLS.h"

using namespace std;

const char *RegressorKRLS::GetInfoString()
{
    // the description is built by appending at an offset; passing the
    // destination buffer back in as a %s source (as the old code did) is
    // undefined behaviour and the source of the format-string warnings
    char *text = new char[1024];
    int n = 0;
    n += sprintf(text + n, "Kernel Ridge Least Squares\n");
    n += sprintf(text + n, "Capacity: %d", capacity);
    n += sprintf(text + n, "Kernel: ");
    switch(kernelType)
    {
    case 0:
        n += sprintf(text + n, " linear");
        break;
    case 1:
        n += sprintf(text + n, " polynomial (deg: %d width: %f)", kernelDegree, kernelParam);
        break;
    case 2:
        n += sprintf(text + n, " rbf (gamma: %f)", kernelParam);
        break;
    }
    n += sprintf(text + n, "eps: %f\n", epsilon);
    sprintf(text + n, "Basis Functions: %lu\n", (unsigned long)GetSVs().size());
    return text;
}

RegressorKRLS::~RegressorKRLS()
{
    DEL(linTrainer);
    DEL(polTrainer);
    DEL(rbfTrainer);
}

void RegressorKRLS::Train(std::vector< fvec > _samples, ivec _labels)
{
    if(capacity == 1) capacity = 2;
    samples.clear();
    labels.clear();
    if(!_samples.size()) return;
    if(_samples[0].size() > 2) return; // no multi-dim for now...
    dim = _samples[0].size();

    // 1D regression: the first coordinate is the input, the second is the target
    FOR(i, _samples.size())
    {
        reg_sample_type samp;
        samp(0) = _samples[i][0];
        samples.push_back(samp);
        labels.push_back(_samples[i][1]);
    }
    randomize_samples(samples, labels);

    // drop any previous trainers and rebuild the one matching the selected kernel
    DEL(linTrainer);
    DEL(polTrainer);
    DEL(rbfTrainer);
    switch(kernelType)
    {
    case 0:
        {
            linTrainer = new dlib::krls<reg_lin_kernel>(reg_lin_kernel(), epsilon, capacity ? capacity : 1000000);
            FOR(i, samples.size())
            {
                linTrainer->train(samples[i], labels[i]);
            }
            linFunc = linTrainer->get_decision_function();
        }
        break;
    case 1:
        {
            polTrainer = new dlib::krls<reg_pol_kernel>(reg_pol_kernel(1./kernelParam, 0, kernelDegree), epsilon, capacity ? capacity : 1000000);
            FOR(i, samples.size())
            {
                polTrainer->train(samples[i], labels[i]);
            }
            polFunc = polTrainer->get_decision_function();
        }
        break;
    case 2:
        {
            rbfTrainer = new dlib::krls<reg_rbf_kernel>(reg_rbf_kernel(1./kernelParam), epsilon, capacity ? capacity : 1000000);
            FOR(i, samples.size())
            {
                rbfTrainer->train(samples[i], labels[i]);
            }
            rbfFunc = rbfTrainer->get_decision_function();
        }
        break;
    }
}

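// Descriptive note on the dlib::krls parameters used in Train() above (added
// for clarity; the interpretation follows dlib's krls documentation): the
// model is trained incrementally, one call to train() per sample. The second
// constructor argument (epsilon here) is the tolerance of the linear-dependence
// test that decides whether a sample is kept as a new basis vector, and the
// third argument caps the dictionary size, which is presumably why a capacity
// of 0 (no limit) is mapped to the very large value 1000000.
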
fvec RegressorKRLS::Test( const fvec &_sample )
{
    fvec res;
    res.resize(2,0);
    if(!linTrainer && !polTrainer && !rbfTrainer) return res;
    reg_sample_type sample;
    sample(0) = _sample[0];
    switch(kernelType)
    {
    case 0:
        res[0] = (*linTrainer)(sample);
        break;
    case 1:
        res[0] = (*polTrainer)(sample);
        break;
    case 2:
        res[0] = (*rbfTrainer)(sample);
        break;
    }
    return res;
}

fVec RegressorKRLS::Test( const fVec &_sample )
{
    fVec res;
    if(!linTrainer && !polTrainer && !rbfTrainer) return res; // not trained yet
    reg_sample_type sample;
    sample(0) = _sample._[0];
    switch(kernelType)
    {
    case 0:
        res[0] = (*linTrainer)(sample);
        break;
    case 1:
        res[0] = (*polTrainer)(sample);
        break;
    case 2:
        res[0] = (*rbfTrainer)(sample);
        break;
    }
    return res;
}

std::vector<fvec> RegressorKRLS::GetSVs()
{
    vector<fvec> SVs;
    if(kernelType == 0)
    {
        FOR(i, linFunc.basis_vectors.nr())
        {
            fvec sv;
            sv.resize(2,0);
            sv[0] = linFunc.basis_vectors(i)(0);
            SVs.push_back(sv);
        }
    }
    else if(kernelType == 1)
    {
        FOR(i, polFunc.basis_vectors.nr())
        {
            fvec sv;
            sv.resize(2,0);
            sv[0] = polFunc.basis_vectors(i)(0);
            SVs.push_back(sv);
        }
    }
    else if(kernelType == 2)
    {
        FOR(i, rbfFunc.basis_vectors.nr())
        {
            fvec sv;
            sv.resize(2,0);
            sv[0] = rbfFunc.basis_vectors(i)(0);
            SVs.push_back(sv);
        }
    }

    // recover a y value for each basis vector from the closest training sample
    // (fabs rather than abs, to avoid the integer-abs truncation pitfall)
    FOR(i, SVs.size())
    {
        int closest = 0;
        double dist = DBL_MAX;
        FOR(j, samples.size())
        {
            double d = fabs(samples[j](0)-SVs[i][0]);
            if(d < dist)
            {
                dist = d;
                closest = j;
            }
        }
        SVs[i][1] = labels[closest];
    }
    return SVs;
}
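
For context, a minimal sketch of how this plugin class might be driven by the host application. The default constructor and the commented-out SetParams call are assumptions about the interface declared in regressorKRLS.h (not shown in this file); fvec/ivec are the std::vector<float>/std::vector<int> typedefs MLDemos uses for samples and labels.

    #include "regressorKRLS.h"

    // hypothetical driver, for illustration only
    void ExampleFit()
    {
        std::vector<fvec> data;
        fvec pt(2);
        pt[0] = 0.5f;   // input x
        pt[1] = 1.2f;   // regression target y, read back by Train()
        data.push_back(pt);
        ivec dummyLabels(data.size(), 0);   // ignored by Train()

        RegressorKRLS regressor;
        // regressor.SetParams(2 /*rbf*/, 1, 0.1f, 100, 0.001f); // hypothetical setter
        regressor.Train(data, dummyLabels);
        fvec estimate = regressor.Test(data[0]);   // estimate[0] holds the prediction
    }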