Classic Perceptron implementation
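A single-layer perceptron with two inputs learns a linearly separable function (here, logical OR) by repeatedly applying the perceptron learning rule: after each training example, each weight is adjusted by alpha * (target - prediction) * input, and the bias by alpha * (target - prediction). Predictions come from a step activation that outputs 1 when the weighted sum w1*x1 + w2*x2 + b is non-negative, and 0 otherwise.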
#include <iostream>
#include <vector>
using namespace std;

// Step activation function: 1 if the weighted sum is non-negative, 0 otherwise
int activation(double x) {
    return x >= 0.0 ? 1 : 0;
}

// Perceptron training: adjusts weights and bias with the perceptron learning rule
void trainPerceptron(const vector<vector<double>> &X, const vector<int> &y,
                     double &w1, double &w2, double &b,
                     double alpha, int epochs) {
    for (int epoch = 0; epoch < epochs; epoch++) {
        for (size_t i = 0; i < X.size(); i++) {
            double x1 = X[i][0], x2 = X[i][1];
            double z = x1 * w1 + x2 * w2 + b;
            int y_pred = activation(z);

            // Update weights and bias: w += alpha * (target - prediction) * input
            w1 += alpha * (y[i] - y_pred) * x1;
            w2 += alpha * (y[i] - y_pred) * x2;
            b  += alpha * (y[i] - y_pred);
        }
    }
}

int main() {
    // OR training data
    vector<vector<double>> X = {{0,0}, {0,1}, {1,0}, {1,1}};
    vector<int> y = {0, 1, 1, 1};

    double w1 = 0.0, w2 = 0.0, b = 0.0; // Initial weights and bias
    double alpha = 0.1;                 // Learning rate
    int epochs = 10000;                 // Number of training epochs

    trainPerceptron(X, y, w1, w2, b, alpha, epochs);

    // Test the trained model
    cout << "0 OR 0 = " << activation(0*w1 + 0*w2 + b) << endl;
    cout << "0 OR 1 = " << activation(0*w1 + 1*w2 + b) << endl;
    cout << "1 OR 0 = " << activation(1*w1 + 0*w2 + b) << endl;
    cout << "1 OR 1 = " << activation(1*w1 + 1*w2 + b) << endl;
    return 0;
}
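A minimal way to try it out, assuming a local C++ compiler such as g++ and that the snippet is saved as perceptron.cpp (both are just illustrative choices, not part of the gist):

g++ -std=c++11 perceptron.cpp -o perceptron
./perceptron

With the step activation and the OR labels above, the trained model should print 0, 1, 1, 1 for the four test cases. To train the same perceptron on AND instead, only the labels need to change, e.g. vector<int> y = {0, 0, 0, 1}; (and the printed strings would then read AND rather than OR).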