GNB - Gaussian Naive Bayes classifier (classifier.cpp)
#include "classifier.h" | |
#include <math.h> | |
#include <string> | |
#include <vector> | |
#include <iostream> | |
using Eigen::ArrayXd; | |
using std::string; | |
using std::vector; | |
// Initializes GNB
GNB::GNB() {
  // Pre-allocate the per-class statistics: one mean and one standard
  // deviation per feature (s, d, s_dot, d_dot) for each of the 3 labels.
  for (int i = 0; i < 3; ++i) {
    mean.push_back(vector<double>(4));
    standardDeviation.push_back(vector<double>(4));
  }
}

GNB::~GNB() {}
void GNB::train(const vector<vector<double>> &data,
                const vector<string> &labels) {
  /**
   * Trains the classifier with N data points and labels.
   * @param data - array of N observations
   *   - Each observation is a tuple with 4 values: s, d, s_dot and d_dot.
   *   - Example: [[3.5, 0.1, 5.9, -0.02],
   *               [8.0, -0.3, 3.0, 2.2],
   *               ...]
   * @param labels - array of N labels
   *   - Each label is one of "left", "keep", or "right".
   */
  // First pass: count the samples of each class and accumulate the
  // per-feature sums, from which the class means are computed below.
  vector<int> count = {0, 0, 0};
  for (size_t i = 0; i < labels.size(); ++i) {
    // Map the label to an index: 0 = "left", 1 = "keep", 2 = "right".
    int idx = 0;
    if (labels[i].compare("keep") == 0) {
      idx = 1;
    } else if (labels[i].compare("right") == 0) {
      idx = 2;
    }
    count[idx] += 1;
    for (int j = 0; j < 4; ++j) {
      mean[idx][j] += data[i][j];
    }
  }
  // Divide each accumulated sum by the class count to obtain the means.
  for (int idx = 0; idx < 3; ++idx) {
    for (int j = 0; j < 4; ++j) {
      mean[idx][j] /= count[idx];
    }
  }
  // Second pass: accumulate the squared deviations from the class means.
  // The class counts are already final; incrementing them again here would
  // divide the variances by 2N instead of N.
  for (size_t i = 0; i < labels.size(); ++i) {
    int idx = 0;
    if (labels[i].compare("keep") == 0) {
      idx = 1;
    } else if (labels[i].compare("right") == 0) {
      idx = 2;
    }
    for (int j = 0; j < 4; ++j) {
      standardDeviation[idx][j] += pow(data[i][j] - mean[idx][j], 2);
    }
  }
  // Finish the per-class, per-feature standard deviations.
  for (int idx = 0; idx < 3; ++idx) {
    for (int j = 0; j < 4; ++j) {
      standardDeviation[idx][j] = sqrt(standardDeviation[idx][j] / count[idx]);
    }
  }
  // Print the learned statistics for debugging.
  for (int k = 0; k < 3; ++k) {
    std::cout << "count: " << count[k] << std::endl;
    std::cout << "means: ";
    for (int l = 0; l < 4; ++l) {
      std::cout << mean[k][l] << ", ";
    }
    std::cout << std::endl;
    std::cout << "standardDeviation: ";
    for (int l = 0; l < 4; ++l) {
      std::cout << standardDeviation[k][l] << ", ";
    }
    std::cout << std::endl;
  }
}
string GNB::predict(const vector<double> &sample) {
  /**
   * Once trained, this method is called and expected to return
   * a predicted behavior for the given observation.
   * @param sample - a 4 tuple with s, d, s_dot, d_dot.
   *   - Example: [3.5, 0.1, 8.5, -0.2]
   * @output A label representing the best guess of the classifier. Can
   *   be one of "left", "keep" or "right".
   */
  // For each class, compute the likelihood of the sample as the product of
  // independent univariate Gaussians, one per feature:
  //   p(x | class) = prod_j N(x_j; mean[class][j], standardDeviation[class][j])
  // Class priors are not applied, i.e. they are treated as equal.
  vector<double> probs = {0, 0, 0};
  for (int i = 0; i < 3; ++i) {
    double prob = 1;
    for (int j = 0; j < 4; ++j) {
      double variance = pow(standardDeviation[i][j], 2);
      double norm = 1 / sqrt(2 * M_PI * variance);
      double exponent = pow(sample[j] - mean[i][j], 2) / (2 * variance);
      prob *= norm * exp(-exponent);
    }
    probs[i] = prob;
  }
  // Pick the class with the highest likelihood.
  double maxprob = 0;
  int idx = 0;
  for (int i = 0; i < 3; ++i) {
    if (maxprob < probs[i]) {
      idx = i;
      maxprob = probs[i];
    }
  }
  //std::cout << idx << std::endl;
  return this->possible_labels[idx];
}
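
Below is a minimal usage sketch, not part of the gist itself: it assumes the classifier.h used above declares the GNB class with train, predict, and a possible_labels member ordered as {"left", "keep", "right"}, and the handful of observations are invented purely for illustration; a real run would train on a much larger labelled data set.

// example_main.cpp (hypothetical driver, assumptions noted above)
#include <iostream>
#include <string>
#include <vector>
#include "classifier.h"

int main() {
  // Made-up observations [s, d, s_dot, d_dot], two per class, for illustration.
  std::vector<std::vector<double>> data = {
      {3.5, 0.1, 5.9, -0.02}, {4.1, 0.3, 6.2, 0.05},  // "keep"
      {8.0, -0.3, 3.0, 2.2},  {7.2, -0.5, 3.4, 1.9},  // "right"
      {5.0, 7.9, 4.5, -1.5},  {5.5, 7.1, 4.0, -1.2}   // "left"
  };
  std::vector<std::string> labels = {"keep", "keep", "right", "right",
                                     "left", "left"};

  GNB gnb;
  gnb.train(data, labels);

  // Classify a new observation and print the predicted behavior.
  std::string prediction = gnb.predict({3.5, 0.1, 8.5, -0.2});
  std::cout << "Predicted behavior: " << prediction << std::endl;
  return 0;
}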