Here are some distance metrics and kernels I like to use for machine learning related projects.
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = ('Kaan Akşit')
__version__ = '0.1'
# Importing necessary libraries.
try:
    import sys,glob,time,copy
    import numpy as np
    from numpy.linalg import pinv
    from math import e
except ImportError as err:
    print("Couldn't load module: %s" % err)
    sys.exit()
# Cosine similarity calculation between two vectors (1 minus this value gives the cosine distance).
def CosDis(a,b):
    dividend = np.dot(a,b)
    denominator = np.linalg.norm(a,2)*np.linalg.norm(b,2)
    return dividend*1./denominator
# Minkowski distance calculation between two vectors.
def MinDis(a,b,p):
    return np.linalg.norm(a-b,p)
# Chebyshev (chessboard) distance calculation between two vectors.
def ChessDis(a,b):
    return np.amax(abs(a-b))
# Bray-Curtis distance calculation between two vectors.
def BrayDis(a,b):
    dividend = np.sum(abs(a-b))
    denominator = np.sum(abs(a+b))
    return dividend*1./denominator
# Canberra distance calculation between two vectors.
def CanberraDis(a,b):
    dividend = abs(a-b)
    denominator = abs(a)+abs(b)
    return np.sum(dividend*1./denominator)
# Manhattan distance calculation between two vectors.
def ManhattanDis(a,b):
    return np.sum(abs(a-b))
# These kernels are adapted from http://crsouza.com/2010/03/kernel-functions-for-machine-learning-applications/
# Radial basis (Gaussian) kernel calculation between two vectors: exp(-||a-b||^2 / (2*sigma^2)).
def RadialBasis(a,b,sigma=2.):
    gamma = 1./2./sigma**2
    return np.exp(-gamma*np.sum((a-b)**2))
# Exponential kernel calculation between two vectors: exp(-||a-b|| / (2*sigma^2)).
def Exponential(a,b,sigma=2.):
    gamma = 1./2./sigma**2
    return np.exp(-gamma*np.sqrt(np.sum((a-b)**2)))
# Laplacian kernel calculation between two vectors: exp(-||a-b|| / sigma).
def Laplacian(a,b,sigma=2.):
    gamma = 1./sigma
    return np.exp(-gamma*np.sqrt(np.sum((a-b)**2)))
# Linear kernel calculation between two vectors, with an optional constant offset.
def Linear(a,b,sigma=0.):
    return np.dot(a,b.T)+sigma
# Bare call response.
if __name__ == '__main__':
    pass
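For context, here is a minimal usage sketch (not part of the original gist). It assumes the functions above are saved as kernels.py; that file name and the random test data are illustrative only.

#!/usr/bin/python
# -*- coding: utf-8 -*-
# Illustrative usage sketch; assumes the functions above live in a (hypothetical) kernels.py.
import numpy as np
from kernels import ManhattanDis, ChessDis, RadialBasis

# Two random sample vectors.
a = np.random.rand(8)
b = np.random.rand(8)

# Scalar distances between the two vectors.
print(ManhattanDis(a, b))
print(ChessDis(a, b))

# Gram (kernel) matrix over a small random data set, as consumed by kernel methods such as SVMs.
X = np.random.rand(5, 8)
K = np.array([[RadialBasis(x, y, sigma=2.) for y in X] for x in X])
print(K.shape)  # (5, 5)

The Gram matrix loop is deliberately naive; for larger data sets a vectorised pairwise routine would be preferable.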