@thisMagpie
Created January 30, 2013 14:15
Puts the files in the right format: x y xerror yerror
import java.util.Scanner;

class GNUPlot {
    private double gradient;                      // slope of the least-squares fit
    private double xVariance;                     // mean of the x values
    private double yVariance;                     // mean of the y values
    private double offset;                        // intercept of the fit
    private double xxVariance;                    // sum of squared deviations of x from its mean
    private double yyVariance;                    // sum of squared deviations of y from its mean (SSTO)
    private double covariance;                    // sum of products of the x and y deviations
    private double sumx2;                         // sum of x squared
    private int n;                                // number of points read
    private final double[] x;
    private double[] residual;                    // y minus the fitted value at each point
    private double gradientError;                 // standard error of the gradient
    private final double[] y;
    private double linearCorrelationCoefficient;  // coefficient of determination, R squared
    private double rss;                           // residual sum of squares
    private final int max = 15;                   // number of data points to read

    public GNUPlot() {
        gradient = 0.0;
        n = 0;
        x = new double[max];
        y = new double[max];
        residual = new double[max];
        offset = 0.0;
        gradientError = 0.0;
        xxVariance = 0.0;
        yyVariance = 0.0;
        covariance = 0.0;
        sumx2 = 0.0;
        linearCorrelationCoefficient = 0.0;
    }

    public void printGradient(Scanner xScan, Scanner yScan) {
        double sumx = 0.0, sumy = 0.0;
        // read in the x file and the y file
        while (n < max) {
            x[n] = IOUtil.skipToDouble(xScan);
            y[n] = IOUtil.skipToDouble(yScan);
            sumx += x[n];
            sumx2 += x[n] * x[n];
            sumy += y[n];
            n++;
        }
        xVariance = sumx / n; // mean of x
        yVariance = sumy / n; // mean of y
        // accumulate the sums of squared deviations for the least-squares fit
        for (int i = 0; i < n; i++) {
            xxVariance += (x[i] - xVariance) * (x[i] - xVariance);
            yyVariance += (y[i] - yVariance) * (y[i] - yVariance);
            covariance += (x[i] - xVariance) * (y[i] - yVariance);
        }
        gradient = covariance / xxVariance;        // slope of the least-squares line
        offset = yVariance - gradient * xVariance; // intercept
    }

    public double[] computeResidual() {
        // degrees of freedom = number of data points minus the two fitted parameters
        int degreesFreedom = n - 2;
        rss = 0.0;          // residual sum of squares
        double ssr = 0.0;   // regression sum of squares
        for (int i = 0; i < n; i++) {
            double fit = gradient * x[i] + offset;
            rss += (fit - y[i]) * (fit - y[i]);
            ssr += (fit - yVariance) * (fit - yVariance);
            residual[i] = y[i] - fit;
        }
        linearCorrelationCoefficient = ssr / yyVariance;          // coefficient of determination, R squared
        double svar = rss / degreesFreedom;                       // residual variance
        double svar1 = svar / xxVariance;                         // variance of the gradient
        double svar0 = svar / n + xVariance * xVariance * svar1;  // variance of the offset
        gradientError = Math.sqrt(svar1);
System.out.printf(" R absolute = %g", linearCorrelationCoefficient);
System.out.println();
System.out.printf(" std fit of the gradient = %g ", svar0);
System.out.println();
// print results
System.out.printf(" The gradient is %g ",this.gradient);
System.out.println();
System.out.printf(" error of the gradient = %g ", gradientError);
System.out.println();
System.out.printf(" The the offset is %g ",offset);
System.out.printf(" std error of the offset = %g ",Math.sqrt(svar0));
System.out.println();
//TODO put into a method
svar0 = svar * sumx2 / (n * xxVariance);
System.out.printf(" SSTO = %g ", yyVariance);
System.out.println();
System.out.printf(" Sum of squares of Residuals: %g ", rss);
System.out.println();
System.out.printf(" Regression sum of squares is %g " , ssr);
System.out.println();
return residual;
}

    public double getGradient() {
        return gradient;
    }

    public double getGradientError() {
        return gradientError;
    }

    public double[] getX() {
        return x;
    }

    public double[] getY() {
        return y;
    }

    public double rss() {
        return rss;
    }
}
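
The class calls IOUtil.skipToDouble, which is not included in this gist. Below is a minimal sketch of what that helper and a driver might look like; the IOUtil body, the GNUPlotDriver class, and the file names x.dat and y.dat are assumptions for illustration only, not part of the original.

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;

// Hypothetical stand-in for the IOUtil helper used above: skips any
// non-numeric tokens and returns the next double from the scanner.
class IOUtil {
    public static double skipToDouble(Scanner scan) {
        while (scan.hasNext() && !scan.hasNextDouble()) {
            scan.next(); // discard the non-numeric token
        }
        return scan.nextDouble(); // assumes enough numeric values remain
    }
}

// Illustrative driver: reads 15 x values and 15 y values from two files,
// fits the line, and prints x, y and the residual for each point.
class GNUPlotDriver {
    public static void main(String[] args) throws FileNotFoundException {
        Scanner xScan = new Scanner(new File("x.dat")); // placeholder file names
        Scanner yScan = new Scanner(new File("y.dat"));

        GNUPlot plot = new GNUPlot();
        plot.printGradient(xScan, yScan);            // reads the data and fits y = m*x + c
        double[] residuals = plot.computeResidual(); // prints the fit statistics

        for (int i = 0; i < residuals.length; i++) {
            System.out.printf("%g %g %g%n", plot.getX()[i], plot.getY()[i], residuals[i]);
        }
    }
}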