Last active
August 29, 2015 14:16
-
-
Save jfbercher/1a7a7f4a5d887c5a115b to your computer and use it in GitHub Desktop.
Implementation of a blind source separation algorithm for positive sources. ( Oja E, Plumbley M., Neural Comput. 2004 )
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# -*- coding: utf-8 -*-
"""
may 2014
# Author: JF Bercher
# This work is licensed under CreativeCommons Attribution-ShareAlike license
# http://creativecommons.org/licenses/by-sa/4.0/
"""
"""
Neural Comput. 2004 Sep;16(9):1811-25.
Blind separation of positive sources by globally convergent gradient search.
Oja E, Plumbley M.
http://www.eecs.qmul.ac.uk/~markp/2004/OjaPlumbley04-preprint.pdf
"""
# X: observation
# 1 - Whitening
# z: Vx: whitened version
"""
m = (mean(X, axis=1))
C = dot(X - m[:, newaxis], (X - m[:, newaxis]).T)  # covariance matrix
A, V = eig(C)
z = dot(V.T, X)
mz = (mean(z, axis=1))
Cz = dot(z - mz[:, newaxis], (z - mz[:, newaxis]).T)
# print(Cz)
"""
def s(y):
    """Return a copy of *y* with every positive entry replaced by zero,
    i.e. the elementwise min(0, y_i) used by the gradient update."""
    rectified = y.copy()
    rectified[where(rectified > 0)] = 0
    return rectified
def sqimat(M):
    """Return the inverse matrix square root M**(-1/2).

    M is assumed symmetric positive definite (every caller passes a
    covariance matrix C or the Gram matrix W*W.T).  The result R is
    symmetric and satisfies R*M*R == identity.
    """
    # eigh (not eig) is the right factorization for symmetric input: it
    # guarantees real eigenvalues and an orthonormal eigenvector matrix,
    # so V.T is a true inverse of V even with repeated eigenvalues.
    from numpy.linalg import eigh
    A, V = eigh(M)
    V = matrix(V)
    # Invert the diagonal entries directly instead of forming and
    # inverting a full diagonal matrix (the original inv(sqrt(diagflat(A)))).
    return V * matrix(diagflat(1.0 / sqrt(A))) * V.T
# --------------------------------------------------------------- | |
def posICA(X, mu=0.05, maxiter=5000, tol=1e-5, verbose=True):
    """Blind source separation of positive sources.

    Implementation of the globally convergent gradient search described in
    *Blind separation of positive sources by globally convergent gradient
    search*, Oja E., Plumbley M., Neural Comput. 2004 Sep;16(9):1811-25.

    Parameters
    ----------
    X : matrix
        Observed mixture; each row is one observed channel (assumes
        fewer channels than samples, since p = min(shape(X))).
    mu : float
        Adaptation step, default 0.05.
    maxiter : int
        Maximum number of iterations, default 5000.
    tol : float
        Stop when norm(DeltaW) falls below this value, default 1e-5.
    verbose : bool
        If True, print the iteration count and final tolerance.

    Returns
    -------
    W : matrix
        Separating matrix for the whitened observations.
    UnMix : matrix
        Unmixing matrix for the original observations.
    Mest : matrix
        Estimate of the mixing matrix (inverse of UnMix).
    """
    X = matrix(X)
    p = min(shape(X))
    # 1 - Whitening.  The covariance is left unnormalized (no division by
    # the number of samples), as in the original; this only rescales z by
    # a constant factor.  Note z itself is not mean-centered here.
    m = mean(X, axis=1)
    C = (X - m) * (X - m).T    # (unnormalized) covariance matrix
    z = sqimat(C) * X
    # 2 - Gradient search with re-orthogonalization at each step.
    W = matrix(randn(p, p))        # random initial separating matrix
    W = sqimat(W * W.T) * W        # project onto orthogonal matrices
    DeltaW = eye(p)                # large enough to enter the loop
    it = 0
    while norm(DeltaW) > tol and it < maxiter:
        it += 1
        y = W * z
        sy = s(y)                  # keep only the negative parts of y
        DeltaW = mu * (sy * z.T)
        W = W - DeltaW
        W = sqimat(W * W.T) * W    # re-orthogonalize W
    if verbose:
        print("Finished after {} iterations, and tolerance {}".format(it, norm(DeltaW)))
    # Compose the whitening and separating stages once; the mixing-matrix
    # estimate is just the inverse of that product.
    UnMix = W * sqimat(C)
    Mest = inv(UnMix)
    return W, UnMix, Mest
# --------------------------------------------------------------- | |
if __name__ == '__main__':
    # TEST: two positive sources mixed by a known 2x2 matrix M.
    t = arange(0, 1500)
    Z = zeros((2, 1500))
    # Source 0: rectified sine whose amplitude doubles after sample 1000.
    Z[0, 0:1000] = abs(sin(2 * pi * 0.01 * t[:1000]))
    Z[0, 1000:] = 2 * abs(sin(2 * pi * 0.01 * t[1000:]))
    # Source 1: positive noise (squared uniform samples).  The original
    # also added 0*sign(sin(2*pi*0.002*t + pi/4)) -- a square wave
    # switched off by the zero factor -- removed here as dead code.
    Z[1, :] = 0.8 * (rand(1500) ** 2)
    Z = matrix(Z)
    M = matrix([[0.7, 0.6], [-1, 1]])
    X = M * Z                       # observed mixtures
    #
    W, UnMix, Mest = posICA(X, mu=0.05, maxiter=5000, tol=1e-7)
    #
    y = UnMix * X                   # reconstructed sources
    # Compare the estimated unmixing matrix with the true inverse of M;
    # both are normalized by their (0, 0) entry to remove the inherent
    # scale ambiguity of blind separation.
    iM = inv(M)
    print("Theoretical inverse : \n", iM / iM[0, 0])
    print("Practical inverse : \n", UnMix / UnMix[0, 0])
    close("all")
    figure(1)
    plot(array(X.T))
    title("Observed signal")
    figure(2)
    plot(array(Z.T))
    title("Initial signal")
    figure(3)
    plot(array(y.T))
    title("Reconstructed signal")
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment