Skip to content

Instantly share code, notes, and snippets.

@Garciat
Last active January 25, 2026 13:51
Show Gist options
  • Select an option

  • Save Garciat/54e62d7e2a2b851442a8 to your computer and use it in GitHub Desktop.

Select an option

Save Garciat/54e62d7e2a2b851442a8 to your computer and use it in GitHub Desktop.
import std.stdio, std.conv, std.array, std.algorithm, std.file, std.numeric, std.range, std.math;
/// Number of rows in a jagged 2-D array.
ulong rows(T)(T m) { return m.length; }
/// Number of columns, read from the first row (assumes a rectangular array).
ulong cols(T)(T m) { return m[0].length; }
/// Dense 2-D matrix of T, stored row-major in one flat array.
struct Matrix(T)
{
    immutable ulong rows, cols;
    T[] data;

    /// Allocate an m-by-n matrix; elements are default-initialized.
    this(ulong m, ulong n)
    {
        rows = m;
        cols = n;
        data = new T[m * n];
    }

    /// Build an m-by-n matrix whose (i, j) element is builder(i, j).
    this(B)(ulong m, ulong n, B builder)
    {
        this(m, n);
        foreach (r; 0 .. rows)
        {
            foreach (c; 0 .. cols)
            {
                // Write straight into the flat backing store.
                data[r * cols + c] = builder(r, c);
            }
        }
    }

    /// Copy a rectangular jagged array into a Matrix.
    this(T[][] arr)
    {
        this(arr.rows, arr.cols, (ulong i, ulong j) => arr[i][j]);
    }

    /// Row accessor: m[i] is a slice aliasing row i of the backing store.
    auto opIndex(ulong i)
    {
        immutable start = i * cols;
        return data[start .. start + cols];
    }

    /// Const overload of the row accessor.
    auto opIndex(ulong i) const
    {
        immutable start = i * cols;
        return data[start .. start + cols];
    }

    /// A freshly allocated matrix with rows and columns swapped.
    auto transpose() const
    {
        return Matrix!T(cols, rows, (ulong i, ulong j) => this[j][i]);
    }

    /// Render as a nested-array literal, e.g. "[[1, 2], [3, 4]]".
    string toString()
    {
        auto rowViews = iota(0, rows).map!(r => this[r]).array;
        return rowViews.to!string;
    }
}
// Read-only matrix of doubles; used as the feature/design matrix.
alias Features = const Matrix!double;
// Read-only view over an array of doubles (samples, targets, parameters).
alias Vector = const(double)[];
/// Inner product of two equal-length vectors.
/// Delegates to std.numeric.dotProduct (already used by descent at the
/// gradient step) instead of a hand-rolled accumulation loop; the dead
/// commented-out 2-D special case is removed.
auto dot(Vector v, Vector w)
{
    return dotProduct(v, w);
}
/// Linear hypothesis: prediction is the inner product of features and parameters.
double model(Vector xs, Vector th)
{
    return xs.dot(th);
}
/// Residual of sample i: prediction minus target.
double err(Features xm, Vector ys, Vector th, ulong i)
{
    immutable predicted = model(xm[i], th);
    return predicted - ys[i];
}
/// One batch gradient-descent step; updates th in place.
/// xmt must be the transpose of xm; errs is caller-provided scratch
/// space of length xm.rows, overwritten each call.
void descent(double r, Features xm, Features xmt, Vector ys, double[] th, double[] errs)
{
    // Learning rate scaled by the sample count (mean gradient).
    immutable step = r / xm.rows;
    // Residual for every sample under the current parameters.
    foreach (row; 0 .. xm.rows)
    {
        errs[row] = err(xm, ys, th, row);
    }
    // Each parameter moves against its gradient component:
    // the dot product of the residuals with that feature column.
    foreach (col; 0 .. xm.cols)
    {
        th[col] -= step * dotProduct(errs, xmt[col]);
    }
}
/// Halved mean squared error of parameters th over all samples.
double cost(Features xm, Vector ys, Vector th)
{
    auto residuals = iota(0, xm.rows).map!(i => err(xm, ys, th, i));
    auto total = residuals.map!(e => e * e).reduce!((a, b) => a + b);
    return total / (2 * xm.rows);
}
/// Run exactly n gradient-descent steps with learning rate r, updating th in place.
void n_descent(ulong n, double r, Features xm, Vector ys, double[] th)
{
    // Transpose and scratch buffer are hoisted out of the loop once.
    auto transposed = xm.transpose;
    auto scratch = new double[xm.rows];
    foreach (_; 0 .. n)
    {
        descent(r, xm, transposed, ys, th, scratch);
    }
}
/// Iterate gradient descent until the cost changes by less than z between steps.
void smart_descent(double z, double r, Features xm, Vector ys, double[] th)
{
    auto transposed = xm.transpose;
    auto scratch = new double[xm.rows];
    // NOTE(review): seeded at 0.0, so if the very first cost is already
    // within z of zero the loop exits before any descent step — confirm
    // this early-out is intended.
    auto previous = 0.0;
    while (true)
    {
        immutable current = cost(xm, ys, th);
        if (abs(current - previous) < z)
            return;
        previous = current;
        descent(r, xm, transposed, ys, th, scratch);
    }
}
/// Lazily split s on '\n', skipping empty entries (blank lines, trailing newline).
auto lines(string s)
{
    return s.splitter('\n').filter!(line => line.length > 0);
}
/// Parse one comma-separated line into an array of T's element type.
auto likeCsv(T)(string s)
{
    return s.splitter(',').map!(field => field.to!(ElementType!T)).array;
}
/// Read the file at path, split it into non-empty lines, and convert
/// each line to T via Conv (defaults to std.conv.to).
T[] readInput(T, alias Conv = to)(string path)
{
    auto text = read(path).to!string;
    return text.lines.map!(Conv!T).array;
}
void main()
{
    // Training data: one CSV feature row per line, one target per line.
    auto rawX = readInput!(double[], likeCsv)("ex2x.txt");
    auto rawY = readInput!double("ex2y.txt");
    // Prepend the bias term 1.0 to every feature row before building the matrix.
    Features xm = rawX.map!(row => [1.0] ~ row).array;
    Vector ys = rawY;
    // Parameters start at zero, one per feature column (bias included).
    auto theta = repeat(0.0).take(xm.cols).array;
    immutable rate = 0.07;
    n_descent(1_000_000, rate, xm, ys, theta);
    writeln(theta);
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment