-
Notifications
You must be signed in to change notification settings - Fork 69
/
regression.go
345 lines (301 loc) · 9.07 KB
/
regression.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
package regression
import (
"errors"
"fmt"
"math"
"strconv"
"strings"
"gonum.org/v1/gonum/mat"
)
var (
	// ErrNotEnoughData signals that there weren't enough data points to train the model.
	ErrNotEnoughData = errors.New("not enough data points")
	// ErrTooManyVars signals that there are too many variables for the number of observations being made.
	ErrTooManyVars = errors.New("not enough observations to support this many variables")
	// ErrRegressionRun signals that the Run method has already been called on the trained dataset.
	ErrRegressionRun = errors.New("regression has already been run")
)
// Regression is the exposed data structure for interacting with the API.
type Regression struct {
	names             describe        // optional human-readable names for the observed value and the variables
	data              []*dataPoint    // training points accumulated via Train
	coeff             map[int]float64 // fitted coefficients keyed by index; index 0 is the intercept, set by Run
	R2                float64         // coefficient of determination, set by Run
	Varianceobserved  float64         // variance of the observed values, set by Run
	VariancePredicted float64         // variance of the predicted values, set by Run
	initialised       bool            // true once more than two data points have been trained
	Formula           string          // human-readable fitted formula, set by Run
	crosses           []featureCross  // registered feature crosses, applied once during Run
	hasRun            bool            // guards against calling Run a second time
}
// dataPoint is a single training observation: the observed value plus its
// feature variables, annotated after Run with the model's prediction and error.
type dataPoint struct {
	Observed  float64   // dependent value supplied by the caller
	Variables []float64 // feature values; extended in place by feature crosses during Run
	Predicted float64   // model output for Variables, set by calcPredicted
	Error     float64   // Predicted - Observed, set by calcPredicted
}
// describe holds the optional human-readable names: one for the observed
// value and one per variable, keyed by variable index.
type describe struct {
	obs  string
	vars map[int]string
}
// DataPoints is a slice of *dataPoint.
// This type allows for easier construction of training data points.
type DataPoints []*dataPoint
// DataPoint creates a well formed *dataPoint used for training.
func DataPoint(obs float64, vars []float64) *dataPoint {
	dp := dataPoint{
		Observed:  obs,
		Variables: vars,
	}
	return &dp
}
// Predict returns the model's prediction for the supplied feature values.
// It returns ErrNotEnoughData when the model has not been initialised.
func (r *Regression) Predict(vars []float64) (float64, error) {
	if !r.initialised {
		return 0, ErrNotEnoughData
	}
	// Extend vars with any registered feature crosses so it lines up with
	// the trained variable set.
	for _, fc := range r.crosses {
		vars = append(vars, fc.Calculate(vars)...)
	}
	sum := r.Coeff(0) // intercept
	for i := range r.data[0].Variables {
		sum += r.Coeff(i+1) * vars[i]
	}
	return sum, nil
}
// SetObserved sets the name of the observed value.
func (r *Regression) SetObserved(name string) {
	r.names.obs = name
}
// GetObserved gets the name of the observed value.
func (r *Regression) GetObserved() string {
	return r.names.obs
}
// SetVar sets the name of variable i, lazily allocating the name table on
// first use.
func (r *Regression) SetVar(i int, name string) {
	if r.names.vars == nil {
		r.names.vars = make(map[int]string, 5)
	}
	r.names.vars[i] = name
}
// GetVar gets the name of variable i, falling back to a generated "X<i>"
// label when no name has been set.
func (r *Regression) GetVar(i int) string {
	if name := r.names.vars[i]; name != "" {
		return name
	}
	return "X" + strconv.Itoa(i)
}
// AddCross registers a feature cross to be applied to the data points.
// Crosses are applied (in registration order) by Run and Predict.
func (r *Regression) AddCross(cross featureCross) {
	r.crosses = append(r.crosses, cross)
}
// Train adds data points to the model's training set. The regression is
// considered initialised once more than two points have been accumulated.
func (r *Regression) Train(points ...*dataPoint) {
	r.data = append(r.data, points...)
	if len(r.data) > 2 {
		r.initialised = true
	}
}
// applyCrosses expands every data point with the registered feature crosses
// and extends the variable-name table to cover the generated variables.
// This should only be run once, as part of Run().
func (r *Regression) applyCrosses() {
	cursor := len(r.data[0].Variables)
	for _, dp := range r.data {
		for _, fc := range r.crosses {
			// Note: each cross sees the variables produced by earlier crosses.
			dp.Variables = append(dp.Variables, fc.Calculate(dp.Variables)...)
		}
	}
	if len(r.names.vars) == 0 {
		r.names.vars = make(map[int]string, 5)
	}
	for _, fc := range r.crosses {
		cursor += fc.ExtendNames(r.names.vars, cursor)
	}
}
// Run determines if there is enough data present to run the regression
// and whether or not the training has already been completed.
// Once the above checks have passed feature crosses are applied if any
// and the model is trained using QR decomposition.
// Returns ErrNotEnoughData, ErrRegressionRun, or ErrTooManyVars accordingly.
func (r *Regression) Run() error {
	if !r.initialised {
		return ErrNotEnoughData
	}
	if r.hasRun {
		return ErrRegressionRun
	}
	//apply any features crosses
	r.applyCrosses()
	r.hasRun = true
	observations := len(r.data)
	numOfvars := len(r.data[0].Variables)
	// Need at least one more observation than variables (the intercept
	// counts as one) for the system to be solvable.
	if observations < (numOfvars + 1) {
		return ErrTooManyVars
	}
	// Create some blank variable space
	observed := mat.NewDense(observations, 1, nil)
	variables := mat.NewDense(observations, numOfvars+1, nil)
	for i := 0; i < observations; i++ {
		observed.Set(i, 0, r.data[i].Observed)
		for j := 0; j < numOfvars+1; j++ {
			if j == 0 {
				// Column 0 is the constant 1 for the intercept term.
				variables.Set(i, 0, 1)
			} else {
				variables.Set(i, j, r.data[i].Variables[j-1])
			}
		}
	}
	// Now run the regression: factorize X = QR, then solve R*c = Q^T*y.
	_, n := variables.Dims() // cols
	qr := new(mat.QR)
	qr.Factorize(variables)
	q := new(mat.Dense)
	reg := new(mat.Dense)
	qr.QTo(q)
	qr.RTo(reg)
	qtr := q.T()
	qty := new(mat.Dense)
	qty.Mul(qtr, observed)
	// Back substitution against the upper-triangular R, bottom row first.
	c := make([]float64, n)
	for i := n - 1; i >= 0; i-- {
		c[i] = qty.At(i, 0)
		for j := i + 1; j < n; j++ {
			c[i] -= c[j] * reg.At(i, j)
		}
		c[i] /= reg.At(i, i)
	}
	// Output the regression results
	r.coeff = make(map[int]float64, numOfvars)
	for i, val := range c {
		r.coeff[i] = val
		if i == 0 {
			// c[0] is the intercept.
			r.Formula = fmt.Sprintf("Predicted = %.4f", val)
		} else {
			r.Formula += fmt.Sprintf(" + %v*%.4f", r.GetVar(i-1), val)
		}
	}
	r.calcPredicted()
	r.calcVariance()
	r.calcR2()
	return nil
}
// Coeff returns the calculated coefficient for variable i, or 0 when the
// regression has not been run yet.
func (r *Regression) Coeff(i int) float64 {
	// Indexing a nil or empty map yields the zero value, so no explicit
	// guard is needed before Run has populated r.coeff.
	return r.coeff[i]
}
// GetCoeffs returns the calculated coefficients. The element at index 0 is the offset.
// It returns nil when the regression has not been run yet.
func (r *Regression) GetCoeffs() []float64 {
	if len(r.coeff) == 0 {
		return nil
	}
	out := make([]float64, len(r.coeff))
	for i := range out {
		out[i] = r.coeff[i]
	}
	return out
}
// calcPredicted computes the predicted value and residual error for every
// training point, stores them on the points, and returns a textual summary.
func (r *Regression) calcPredicted() string {
	var sb strings.Builder
	for i, d := range r.data {
		d.Predicted, _ = r.Predict(d.Variables)
		d.Error = d.Predicted - d.Observed
		// Report the freshly computed prediction. (The previous version
		// printed an uninitialised local `predicted`, which was always 0.)
		fmt.Fprintf(&sb, "%v. observed = %v, Predicted = %v, Error = %v", i, d.Observed, d.Predicted, d.Error)
	}
	return sb.String()
}
// calcVariance computes the population variance of both the observed and the
// predicted values, stores them on the receiver, and returns a summary string.
func (r *Regression) calcVariance() string {
	n := float64(len(r.data))
	var sumObs, sumPred float64
	for _, d := range r.data {
		sumObs += d.Observed
		sumPred += d.Predicted
	}
	meanObs := sumObs / n
	meanPred := sumPred / n
	var ssObs, ssPred float64
	for _, d := range r.data {
		ssObs += math.Pow(d.Observed-meanObs, 2)
		ssPred += math.Pow(d.Predicted-meanPred, 2)
	}
	r.Varianceobserved = ssObs / n
	r.VariancePredicted = ssPred / n
	return fmt.Sprintf("N = %v\nVariance observed = %v\nVariance Predicted = %v\n", len(r.data), r.Varianceobserved, r.VariancePredicted)
}
// calcR2 computes the coefficient of determination as the ratio of predicted
// to observed variance, stores it on the receiver, and returns it formatted.
// NOTE(review): divides without a zero check — R2 becomes NaN/±Inf when the
// observed values are constant; confirm this is acceptable to callers.
func (r *Regression) calcR2() string {
	r.R2 = r.VariancePredicted / r.Varianceobserved
	return fmt.Sprintf("R2 = %.2f", r.R2)
}
// calcResiduals renders an observed/predicted/residual table covering every
// training point, terminated by a blank line.
func (r *Regression) calcResiduals() string {
	var sb strings.Builder
	// Plain literal: the previous fmt.Sprintf call had no format verbs
	// (flagged by go vet / staticcheck S1039).
	sb.WriteString("Residuals:\nobserved|\tPredicted|\tResidual\n")
	for _, d := range r.data {
		fmt.Fprintf(&sb, "%.2f|\t%.2f|\t%.2f\n", d.Observed, d.Predicted, d.Observed-d.Predicted)
	}
	sb.WriteString("\n")
	return sb.String()
}
// String satisfies the stringer interface to display a dataPoint as a string.
func (d *dataPoint) String() string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "%.2f", d.Observed)
	for _, v := range d.Variables {
		fmt.Fprintf(&sb, "|\t%.2f", v)
	}
	return sb.String()
}
// String satisfies the stringer interface to display a regression as a string:
// the header row, every data point, the residuals table, and the summary stats.
func (r *Regression) String() string {
	if !r.initialised {
		return ErrNotEnoughData.Error()
	}
	var sb strings.Builder
	sb.WriteString(r.GetObserved())
	for i := 0; i < len(r.names.vars); i++ {
		fmt.Fprintf(&sb, "|\t%v", r.GetVar(i))
	}
	sb.WriteString("\n")
	for _, d := range r.data {
		fmt.Fprintf(&sb, "%v\n", d)
	}
	// Include the residuals table in the returned string. The previous
	// version fmt.Println'ed it to stdout — a Stringer must not have side
	// effects, and the table was lost from the returned value.
	sb.WriteString(r.calcResiduals())
	fmt.Fprintf(&sb, "\nN = %v\nVariance observed = %v\nVariance Predicted = %v", len(r.data), r.Varianceobserved, r.VariancePredicted)
	fmt.Fprintf(&sb, "\nR2 = %v\n", r.R2)
	return sb.String()
}
// MakeDataPoints makes a `[]*dataPoint` from a `[][]float64`. The expected format for the input is a row-major [][]float64.
// That is to say the first slice represents a row, and the second represents the cols.
// Furthermore it is expected that all the col slices are of the same length.
// The obsIndex parameter indicates which column holds the observed value;
// the fast paths cover the first and last columns.
func MakeDataPoints(a [][]float64, obsIndex int) []*dataPoint {
	width := len(a[0])
	switch obsIndex {
	case 0:
		points := make([]*dataPoint, 0, len(a))
		for _, row := range a {
			points = append(points, DataPoint(row[0], row[1:]))
		}
		return points
	case width - 1:
		// Observation in the last column.
		points := make([]*dataPoint, 0, len(a))
		for _, row := range a {
			points = append(points, DataPoint(row[width-1], row[:width-1]))
		}
		return points
	default:
		return perverseMakeDataPoints(a, obsIndex)
	}
}
// perverseMakeDataPoints handles the slow path of MakeDataPoints where the
// observation column sits somewhere in the middle of each row.
func perverseMakeDataPoints(a [][]float64, obsIndex int) []*dataPoint {
	points := make([]*dataPoint, 0, len(a))
	for _, row := range a {
		features := make([]float64, 0, len(row)-1)
		features = append(features, row[:obsIndex]...)
		features = append(features, row[obsIndex+1:]...)
		points = append(points, DataPoint(row[obsIndex], features))
	}
	return points
}