network

package main

import (
	"encoding/json"
	"fmt"
	"math"
	"math/rand"
	"net/http"
	"os"
	"time"

	"github.com/gorilla/websocket"
)
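
// Values is what the server streams back to the client while training.
// Receive is what the client sends in; only Dataset is used here.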
type Values struct {
	Progress int
	Loss     []float64
	Table    []string
}

type Receive struct {
	Top5     [][]string
	Dataset  [][]float64
	MeanKm   float64
	MeanAge  float64
	StdKm    float64
	StdAge   float64
	MinPrice int
	MaxPrice int
}

var upgrader = websocket.Upgrader{
	ReadBufferSize:  1024,
	WriteBufferSize: 1024,
}

func main() {
	port := "8080"
	// Optionally take the port as the first command-line argument.
	if len(os.Args) > 1 {
		port = os.Args[1]
	}
	http.HandleFunc("/nn", serve)
	fmt.Println(http.ListenAndServe(fmt.Sprintf(":%s", port), nil))
}
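
// serve upgrades the request to a websocket, answers "ping" with "pong", and
// treats any other message as a JSON-encoded Receive whose Dataset is handed
// to the training routine.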
func serve(w http.ResponseWriter, r *http.Request) {
	upgrader.CheckOrigin = func(r *http.Request) bool { return true }
	conn, err := upgrader.Upgrade(w, r, nil)
	if err != nil {
		// Upgrade has already replied to the client with an HTTP error.
		fmt.Println(err.Error())
		return
	}
	fmt.Println("Connected!")
	// From here on the connection is hijacked, so errors are only logged;
	// writing to the ResponseWriter would no longer be valid.
	for {
		msgType, msg, err := conn.ReadMessage()
		if err != nil {
			fmt.Println(err.Error())
			return
		}
		if string(msg) == "ping" {
			fmt.Printf("%s nn sent: %s\n", conn.RemoteAddr(), string(msg))
			if err = conn.WriteMessage(msgType, []byte("pong")); err != nil {
				fmt.Println(err.Error())
				return
			}
		} else {
			var unpackhere Receive
			if err := json.Unmarshal(msg, &unpackhere); err != nil {
				fmt.Println(err.Error())
				return
			}
			nn(unpackhere.Dataset, conn)
		}
	}
}

// ===== predictor =====
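// Predictor is anything that can map inputs to outputs and expose the
// gradients of its outputs with respect to its parameters and inputs,
// so that gradient descent can adjust it.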
type Predictor interface {
	Inputs() int
	Predict([]float64) []float64
	Parameters() int
	Gradients([]float64) (params, inputs []float64)
	AddToParameters([]float64)
}

// ===== neuron =====
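// Neuron computes a weighted sum of its inputs plus a bias: y = w·x + b.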
type Neuron struct {
	Weights []float64
	Bias    float64
}

// NewNeuronRandom initialises the bias in [0, 1) and each weight in [-0.25, 0.25).
func NewNeuronRandom(inputs int, src rand.Source) *Neuron {
	r := rand.New(src)
	n := &Neuron{Weights: make([]float64, inputs), Bias: r.Float64()}
	for i := range n.Weights {
		n.Weights[i] = (r.Float64() - 0.5) / 2
	}
	return n
}

func (n *Neuron) Inputs() int     { return len(n.Weights) }
func (n *Neuron) Parameters() int { return n.Inputs() + 1 }

func (n *Neuron) Predict(inputs []float64) []float64 {
	v := 0.0
	for i, input := range inputs {
		v += input * n.Weights[i]
	}
	return []float64{v + n.Bias}
}

func (n *Neuron) Gradients(x []float64) (params, inputs []float64) {
	params = append(params, 1)            // ∂y/∂b
	params = append(params, x...)         // ∂y/∂w(i)
	inputs = append(inputs, n.Weights...) // ∂y/∂x(i)
	return params, inputs
}

func (n *Neuron) AddToParameters(ps []float64) {
	n.Bias += ps[0]
	for i, p := range ps[1:] {
		n.Weights[i] += p
	}
}

// ===== loss =====
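// MeanSquaredErrorValue computes MSE = (1/N) Σ (y − ŷ)² over the dataset,
// where each row is [inputs..., target] and ŷ = p.Predict(inputs)[0].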
func MeanSquaredErrorValue(p Predictor, data [][]float64) float64 {
	mse := 0.0
	for _, row := range data {
		inputs, output := row[:p.Inputs()], row[p.Inputs()]
		err := output - p.Predict(inputs)[0]
		mse += err * err
	}
	return mse / float64(len(data))
}

// MeanSquaredErrorGradients computes ∂MSE/∂θ = (2/N) Σ (ŷ − y) · ∂ŷ/∂θ
// for every parameter θ of the predictor, summed over the dataset.
func MeanSquaredErrorGradients(p Predictor, dataset [][]float64) []float64 {
	gradients := make([]float64, p.Parameters())
	for _, row := range dataset {
		inputs, output := row[:p.Inputs()], row[p.Inputs()]
		v := p.Predict(inputs)[0]
		gs, _ := p.Gradients(inputs)
		for i, g := range gs {
			gradients[i] += (v - output) * g
		}
	}
	d := 2 / float64(len(dataset))
	for i := range gradients {
		gradients[i] = d * gradients[i]
	}
	return gradients
}

// ===== gradient descent =====
func GradientDescentStep(p Predictor, learningRate float64, dataset [][]float64) {
	gs := MeanSquaredErrorGradients(p, dataset)
	for i, g := range gs {
		gs[i] = -learningRate * g
	}
	p.AddToParameters(gs)
}

// ===== layer =====
type Layer struct {
	Neurons    []*Neuron
	Activation *ActivationFunction
}

func NewLayer(inputs, outputs int, act *ActivationFunction, src rand.Source) *Layer {
	neurons := make([]*Neuron, outputs)
	for i := range neurons {
		neurons[i] = NewNeuronRandom(inputs, src)
	}
	return &Layer{Neurons: neurons, Activation: act}
}

func (l *Layer) Inputs() int     { return l.Neurons[0].Inputs() }
func (l *Layer) Outputs() int    { return len(l.Neurons) }
func (l *Layer) Parameters() int { return len(l.Neurons) * l.Neurons[0].Parameters() }

func (l *Layer) Predict(inputs []float64) []float64 {
	var vs []float64
	for _, neuron := range l.Neurons {
		z := neuron.Predict(inputs)[0]
		a := l.Activation.Value(z)
		vs = append(vs, a)
		// vs = append(vs, z)
	}
	return vs
}
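
// Gradients returns, for each neuron in turn, the gradients of its activated
// output with respect to its parameters, and, summed over all neurons, the
// gradients with respect to the layer's inputs.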
func (l *Layer) Gradients(x []float64) (params, inputs []float64) {
	inputs = make([]float64, l.Inputs())
	for _, neuron := range l.Neurons {
		z := neuron.Predict(x)[0]
		zg := l.Activation.Gradient(z)
		gs, is := neuron.Gradients(x)
		for _, g := range gs {
			params = append(params, g*zg)
		}
		for i, g := range is {
			inputs[i] += g * zg
		}
	}
	return params, inputs
}

// AddToParameters distributes the flat slice of deltas across the layer's
// neurons, neuron.Parameters() values at a time (bias first, then weights).
func (l *Layer) AddToParameters(values []float64) {
	for _, neuron := range l.Neurons {
		// Take this neuron's share of the parameters...
		neuron.AddToParameters(values[:neuron.Parameters()])
		// ...and drop it from the remaining list.
		values = values[neuron.Parameters():]
	}
}

// ===== activation =====
type ActivationFunction struct {
	Value    func(float64) float64
	Gradient func(float64) float64
}

var (
	Identity = &ActivationFunction{
		func(x float64) float64 { return x },
		func(x float64) float64 { return 1 },
	}
	sigmoid = func(x float64) float64 { return 1 / (1 + math.Exp(-x)) }
	// The sigmoid derivative is σ'(x) = σ(x)·(1 − σ(x)).
	Sigmoid = &ActivationFunction{
		sigmoid,
		func(x float64) float64 { return sigmoid(x) * (1 - sigmoid(x)) },
	}
)

// ===== network =====
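// Network chains layers: the outputs of each layer feed the next one.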
type Network struct {
	Layers []*Layer
}

func NewNetwork(layers ...*Layer) (*Network, error) {
	for i := 1; i < len(layers); i++ {
		if layers[i-1].Outputs() != layers[i].Inputs() {
			return nil, fmt.Errorf("layer %d generates %d outputs, but next layer only accepts %d inputs",
				i, layers[i-1].Outputs(), layers[i].Inputs())
		}
	}
	return &Network{layers}, nil
}

func (nn *Network) Inputs() int  { return nn.Layers[0].Inputs() }
func (nn *Network) Outputs() int { return nn.Layers[len(nn.Layers)-1].Outputs() }

func (nn *Network) Predict(inputs []float64) []float64 {
	vs := inputs
	for _, l := range nn.Layers {
		vs = l.Predict(vs)
	}
	return vs
}

func (nn *Network) Parameters() int {
	count := 0
	for _, l := range nn.Layers {
		count += l.Parameters()
	}
	return count
}

func (nn *Network) Gradients(x []float64) (params, inputs []float64) {
	/*
		The goal here is:
		- Run a forward pass, remembering the inputs that reached each layer.
		- Walk the layers back to front, collecting each layer's parameter gradients.
		- For every neuron in a hidden layer, scale its parameter gradients by the
		  input gradients flowing back from the layer above (the chain rule).
		  Layer.Gradients already sums those input gradients across the neurons of
		  the layer above, so the factor for neuron i is simply plig[i].
	*/
	predictions := make([][]float64, len(nn.Layers))
	vs := x
	for i, l := range nn.Layers {
		predictions[i] = append(predictions[i], vs...)
		vs = l.Predict(vs)
	}
	// New gradients, assembled back to front.
	ng := []float64{}
	// Previous (i.e. following) layer's input gradients.
	plig := []float64{}
	for l := len(nn.Layers) - 1; l >= 0; l-- {
		lg, ig := nn.Layers[l].Gradients(predictions[l])
		if l == len(nn.Layers)-1 {
			// The output layer's parameter gradients are used as-is.
			ng = append(ng, lg...)
			plig = ig
			continue
		}
		for i := range nn.Layers[l].Neurons {
			// The adjustment factor: how much the layer above reacts to this neuron's output.
			nlg := plig[i]
			// Scale every parameter gradient of neuron i by that factor.
			for up := 0; up < nn.Layers[l].Neurons[i].Parameters(); up++ {
				lg[(i*nn.Layers[l].Neurons[i].Parameters())+up] *= nlg
			}
		}
		plig = ig
		ng = append(lg, ng...)
	}
	return ng, nil
}

func (nn *Network) AddToParameters(x []float64) {
	for _, l := range nn.Layers {
		l.AddToParameters(x[:l.Parameters()])
		x = x[l.Parameters():]
	}
}

// ===== main =====
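// nn trains a small two-layer network (inputs → 3 → 1, sigmoid activations)
// on the received dataset and streams progress, loss samples, and a few
// example predictions back over the websocket.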
func nn(dataset [][]float64, conn *websocket.Conn) {
	inputs := len(dataset[0]) - 1
	src := rand.NewSource(time.Now().Unix())
	learningRate := 0.1
	// layer := NewLayer(inputs, 1, Sigmoid, src)
	network, err := NewNetwork(
		NewLayer(inputs, 3, Sigmoid, src),
		NewLayer(3, 1, Sigmoid, src),
	)
	if err != nil {
		fmt.Println(err.Error())
		return
	}
	toSend := Values{
		Progress: 100,
		Loss:     []float64{},
		Table:    []string{},
	}
	for i := 0; i < 260; i++ {
		mserr := MeanSquaredErrorValue(network, dataset)
		GradientDescentStep(network, learningRate, dataset)
		if i%4 == 0 {
			toSend.Loss = append(toSend.Loss, mserr)
		}
		if i%13 == 0 {
			toSend.Progress = ((i / 13) + 1) * 5
			// The table shows predictions for the first five rows
			// (assumes the dataset has at least five of them).
			toSend.Table = []string{
				fmt.Sprintf("%f", network.Predict(dataset[0][:inputs])[0]),
				fmt.Sprintf("%f", network.Predict(dataset[1][:inputs])[0]),
				fmt.Sprintf("%f", network.Predict(dataset[2][:inputs])[0]),
				fmt.Sprintf("%f", network.Predict(dataset[3][:inputs])[0]),
				fmt.Sprintf("%f", network.Predict(dataset[4][:inputs])[0]),
			}
			js, err := json.Marshal(toSend)
			if err != nil {
				fmt.Println(err.Error())
				return
			}
			if err = conn.WriteMessage(websocket.TextMessage, js); err != nil {
				fmt.Println(err.Error())
				return
			}
			time.Sleep(500 * time.Millisecond)
			fmt.Printf("%d cost: %.6f\n", i, mserr)
		}
	}
}
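
// A minimal usage sketch, not called anywhere: it trains the same kind of
// two-layer network on a tiny hand-made dataset, without the websocket
// plumbing. The dataset values, seed, learning rate, and iteration count
// below are illustrative assumptions, not taken from the original gist.
func exampleTraining() {
	dataset := [][]float64{
		// inputs..., target
		{0, 0, 0},
		{0, 1, 1},
		{1, 0, 1},
		{1, 1, 0},
	}
	src := rand.NewSource(42)
	network, err := NewNetwork(
		NewLayer(2, 3, Sigmoid, src),
		NewLayer(3, 1, Sigmoid, src),
	)
	if err != nil {
		fmt.Println(err.Error())
		return
	}
	// Repeatedly step every parameter against its MSE gradient.
	for i := 0; i < 1000; i++ {
		GradientDescentStep(network, 0.5, dataset)
	}
	for _, row := range dataset {
		fmt.Printf("%v -> %.3f (target %.0f)\n", row[:2], network.Predict(row[:2])[0], row[2])
	}
}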