-
Notifications
You must be signed in to change notification settings - Fork 0
/
gosor.go
135 lines (110 loc) · 2.59 KB
/
gosor.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
package gosor
import (
	"fmt"
	"strconv"
	"strings"
)
// Tensor is an n-dimensional view over a flat float64 storage buffer.
// Indexing follows the classic strided layout: the element at
// multi-index (i0, i1, ...) lives at storage[offset + sum(i_k * strides[k])].
// Multiple tensors may share one storage slice (see ShallowCopy).
type Tensor struct {
	// GradientTracker records autograd bookkeeping; may be nil when
	// gradients are not tracked (embedded, so its methods are promoted).
	*GradientTracker
	storage []float64 // flat backing buffer, possibly shared between views
	strides []int     // per-dimension step (in storage elements); len must equal len(sizes)
	sizes   []int     // per-dimension extent
	offset  int       // index of the tensor's first element within storage
}
// Items flattens the tensor into a freshly allocated []float64 in
// row-major order, resolving offset and strides for every element.
// It panics when sizes and strides disagree in length (an invalid view).
func (t *Tensor) Items() []float64 {
	if len(t.sizes) != len(t.strides) {
		panic("items on invalid tensor")
	}
	total := 1
	for _, dim := range t.sizes {
		total *= dim
	}
	out := make([]float64, total)
	for i := range out {
		// Decompose the linear index i into per-dimension coordinates,
		// last dimension first, and accumulate the storage position.
		rem := i
		pos := t.offset
		for d := len(t.sizes) - 1; d >= 0; d-- {
			pos += (rem % t.sizes[d]) * t.strides[d]
			rem /= t.sizes[d]
		}
		out[i] = t.storage[pos]
	}
	return out
}
// Item returns the tensor's first element (the value at its offset).
// Intended for scalar tensors; no shape check is performed.
func (t *Tensor) Item() float64 {
	first := t.storage[t.offset]
	return first
}
// Len returns the length of first dimension.
// A dimensionless (scalar) tensor reports 0.
func (t *Tensor) Len() int {
	if len(t.sizes) > 0 {
		return t.sizes[0]
	}
	return 0
}
// Size returns the size of the tensor.
// NOTE(review): this returns the internal sizes slice, not a copy —
// mutating the result mutates the tensor's shape. Confirm callers
// treat it as read-only, or clone before handing it out.
func (t *Tensor) Size() []int {
	return t.sizes
}
// ShallowCopy copies everything except the underlying storage and gradient
// tracker of the tensor. Strides and sizes are cloned so the new view can
// be reshaped independently, but both tensors read and write the same data.
func (t *Tensor) ShallowCopy() *Tensor {
	return &Tensor{
		storage: t.storage,
		strides: append([]int(nil), t.strides...),
		sizes:   append([]int(nil), t.sizes...),
		offset:  t.offset,
	}
}
// DeepCopy creates a new tensor with the same size and copied values.
// The copy is contiguous and does not share storage with the original.
func (t *Tensor) DeepCopy() (*Tensor, error) {
	values := t.Items()
	return New(WithSize(t.sizes...), WithValues(values...))
}
// String renders the tensor as "tensor[d0*d1*...]{ v0 v1 ... }".
// It uses a strings.Builder instead of repeated string concatenation,
// which was quadratic in the number of elements; output is unchanged.
func (t *Tensor) String() string {
	var b strings.Builder
	b.WriteString("tensor[")
	for i, size := range t.sizes {
		if i > 0 {
			b.WriteByte('*')
		}
		b.WriteString(strconv.Itoa(size))
	}
	b.WriteString("]{ ")
	for _, item := range t.Items() {
		// fmt.Sprint keeps the original %v float formatting.
		b.WriteString(fmt.Sprint(item))
		b.WriteByte(' ')
	}
	b.WriteByte('}')
	return b.String()
}
// Map applies f element-wise to t and returns the result as a new,
// contiguous non-leaf tensor of the same size. t is not modified.
func Map(t *Tensor, f func(f float64) float64) (*Tensor, error) {
	n := 1
	for _, dim := range t.sizes {
		n *= dim
	}
	values := make([]float64, n)
	for i := range values {
		values[i] = f(t.storage[t.getStorageIndex(i)])
	}
	return New(withIsNotLeaf(), WithSize(t.sizes...), WithValues(values...))
}
// Sum reduces t along its first dimension by accumulating every slice
// t[i] into a single result tensor, and records a gradient function that
// broadcasts an all-ones gradient back to t's full shape.
func Sum(t *Tensor) (*Tensor, error) {
	// Result shape drops the leading dimension; a 1-D input sums to a
	// single-element tensor.
	resSize := []int{1}
	if len(t.sizes) > 1 {
		resSize = t.sizes[1:]
	}
	res, err := New(WithSize(resSize...), WithRecordGradients())
	if err != nil {
		return nil, err
	}
	// Accumulate each slice along the first dimension into res in place.
	for i := 0; i < t.Len(); i++ {
		cur, err := t.Index(Index(i))
		if err != nil {
			return nil, err
		}
		_, err = AddInto(res, res, cur)
		if err != nil {
			return nil, err
		}
	}
	// res is derived from t, not a user-created leaf.
	res.isNotLeaf = true
	addGradientTracker(res, []*Tensor{t}, func() ([]*Tensor, error) {
		// d(sum)/d(t) is 1 for every element: build a zero tensor of t's
		// shape and add 1 everywhere. Errors inside the Wrap chain are
		// surfaced by the final Value() call.
		g := Wrap(New(WithSize(t.sizes...)))
		g.DoInto(g, AddInto, Wrap(New(WithValues(1))))
		grad, err := g.Value()
		return []*Tensor{grad}, err
	})
	return res, nil
}