// example_linearregression_test.go
package gorgonia_test
import (
"fmt"
"log"
"math/rand"
"runtime"
. "gorgonia.org/gorgonia"
"gorgonia.org/tensor"
)
// vecSize is the number of samples in the synthetic training set.
const (
	vecSize = 10000
)
// xy manually generates a fake dataset approximating y = 2x + noise,
// backed by the requested dtype. x holds 1..vecSize; y holds 2x plus a
// pseudo-random perturbation in [0, 1). Both results are rank-1 tensors
// of length vecSize.
//
// Panics on dtypes other than Float32/Float64, matching the behavior of
// the sibling helper random.
func xy(dt tensor.Dtype) (x tensor.Tensor, y tensor.Tensor) {
	var xBack, yBack interface{}
	switch dt {
	case Float32:
		xBack = tensor.Range(tensor.Float32, 1, vecSize+1).([]float32)
		yBackC := tensor.Range(tensor.Float32, 1, vecSize+1).([]float32)
		for i, v := range yBackC {
			yBackC[i] = v*2 + rand.Float32()
		}
		yBack = yBackC
	case Float64:
		xBack = tensor.Range(tensor.Float64, 1, vecSize+1).([]float64)
		yBackC := tensor.Range(tensor.Float64, 1, vecSize+1).([]float64)
		for i, v := range yBackC {
			yBackC[i] = v*2 + rand.Float64()
		}
		yBack = yBackC
	default:
		// Previously an unhandled dtype fell through with nil backings,
		// producing a confusing failure inside tensor.New. Fail loudly
		// instead, consistent with random().
		panic("Unhandled dtype")
	}
	x = tensor.New(tensor.WithBacking(xBack), tensor.WithShape(vecSize))
	y = tensor.New(tensor.WithBacking(yBack), tensor.WithShape(vecSize))
	return
}
// random produces a pseudo-random scalar of the given dtype. The
// generator is reseeded with a fixed seed on every call, so repeated
// calls with the same dtype yield the same value — this keeps the
// example's printed output deterministic. Panics on unsupported dtypes.
func random(dt tensor.Dtype) interface{} {
	rand.Seed(13370)
	if dt == tensor.Float32 {
		return rand.Float32()
	}
	if dt == tensor.Float64 {
		return rand.Float64()
	}
	panic("Unhandled dtype")
}
// linregSetup builds the expression graph for a univariate linear model
// pred = m*x + c with a mean-squared-error cost, registers the gradient
// computation for m and c, and returns those trainable nodes along with
// a tape machine ready to execute the graph.
func linregSetup(Float tensor.Dtype) (m, c *Node, machine VM) {
	xData, yData := xy(Float)

	g := NewGraph()
	x := NewVector(g, Float, WithShape(vecSize), WithName("x"), WithValue(xData))
	y := NewVector(g, Float, WithShape(vecSize), WithName("y"), WithValue(yData))
	m = NewScalar(g, Float, WithName("m"), WithValue(random(Float)))
	c = NewScalar(g, Float, WithName("c"), WithValue(random(Float)))

	// cost = mean((m*x + c - y)^2)
	prediction := Must(Add(Must(Mul(x, m)), c))
	squaredErr := Must(Square(Must(Sub(prediction, y))))
	loss := Must(Mean(squaredErr))

	if _, err := Grad(loss, m, c); err != nil {
		log.Fatalf("Failed to backpropagate: %v", err)
	}

	// machine := NewLispMachine(g) // you can use a LispMachine, but it'll be VERY slow.
	machine = NewTapeMachine(g, BindDualValues(m, c))
	return m, c, machine
}
// linregRun drives iter gradient-descent steps on the prepared machine,
// updating m and c with a vanilla SGD solver, and returns their final
// values. When autoCleanup is true the machine is closed before return.
// A RunAll failure is reported and training stops early; a solver
// failure is fatal.
func linregRun(m, c *Node, machine VM, iter int, autoCleanup bool) (retM, retC Value) {
	if autoCleanup {
		defer machine.Close()
	}

	params := []ValueGrad{m, c}
	solver := NewVanillaSolver(WithLearnRate(0.001), WithClip(5)) // good idea to clip

	if CUDA {
		// CUDA contexts are bound to OS threads, so pin this goroutine.
		runtime.LockOSThread()
		defer runtime.UnlockOSThread()
	}

	for step := 0; step < iter; step++ {
		if err := machine.RunAll(); err != nil {
			fmt.Printf("Error during iteration: %v: %v\n", step, err)
			break
		}
		if err := solver.Step(params); err != nil {
			log.Fatal(err)
		}
		machine.Reset() // Reset is necessary in a loop like this
	}
	return m.Value(), c.Value()
}
// linearRegression trains a fresh y = m*x + c model of the given dtype
// for iter iterations and returns the learned m and c values. A GC is
// forced on exit to release the graph and machine promptly between runs.
func linearRegression(Float tensor.Dtype, iter int) (retM, retC Value) {
	defer runtime.GC()

	slope, intercept, vm := linregSetup(Float)
	return linregRun(slope, intercept, vm, iter, true)
}
// Linear Regression Example
//
// The formula for a straight line is
//
//	y = mx + c
//
// We want to find an `m` and a `c` that fit the equation well. The same
// model is trained in both float32 and float64 to showcase the
// extensibility of Gorgonia. The // Output: block at the end is matched
// against stdout by `go test`, which is why random() seeds
// deterministically.
func Example_linearRegression() {
	var m, c Value

	// Train with float32-backed data.
	m, c = linearRegression(Float32, 500)
	fmt.Printf("float32: y = %3.3fx + %3.3f\n", m, c)

	// Same training, float64-backed data.
	m, c = linearRegression(Float64, 500)
	fmt.Printf("float64: y = %3.3fx + %3.3f\n", m, c)

	// Output:
	// float32: y = 2.001x + 2.001
	// float64: y = 2.001x + 2.001
}