Commit: improve naming and update to core and etable changes

kkoreilly committed Apr 6, 2024
1 parent 50e40bf · commit 3a9df88
Showing 65 changed files with 474 additions and 477 deletions.
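Most of the 65 changed files differ only in identifier names, tracking the naming updates in the core and etable packages. A rough map of the renames, inferred from the hunks shown below (not exhaustive):

```Go
// Renames in this commit, inferred from the hunks below (not exhaustive):
//   etensor.Prjn2DVal            -> etensor.Prjn2DValue
//   etensor FloatVal1D           -> FloatValue1D
//   etable FloatValRowCell       -> FloatValueRowCell
//   etable IdxView, NamedIdxView -> IndexView, NamedIndexView
//   emer UnitValsTensor          -> UnitValuesTensor
//   decoder ValsTsr, ValsTsrs    -> ValuesTsr, ValuesTsrs
//   netview ViewUpdt             -> ViewUpdate
//   eplot/elog TensorIdx         -> TensorIndex
```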
4 changes: 2 additions & 2 deletions actrf/actrf.go
@@ -102,14 +102,14 @@ func (af *RF) Add(act, src etensor.Tensor, thr float32) {
aNy, aNx, sNy, sNx := shp[0], shp[1], shp[2], shp[3]
for sy := 0; sy < sNy; sy++ {
for sx := 0; sx < sNx; sx++ {
- tv := float32(etensor.Prjn2DVal(src, false, sy, sx))
+ tv := float32(etensor.Prjn2DValue(src, false, sy, sx))
if tv < thr {
continue
}
af.SumSrc.AddScalar([]int{sy, sx}, tv)
for ay := 0; ay < aNy; ay++ {
for ax := 0; ax < aNx; ax++ {
- av := float32(etensor.Prjn2DVal(act, false, ay, ax))
+ av := float32(etensor.Prjn2DValue(act, false, ay, ax))
af.SumProd.AddScalar([]int{ay, ax, sy, sx}, av*tv)
}
}
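For orientation: the loop above accumulates a threshold-gated outer product of activity and source into `SumProd`, alongside the source totals in `SumSrc`, which is what makes `RF` a receptive-field estimate. A standalone sketch of the same accumulation on plain slices; dividing `SumProd` by `SumSrc` to get the normalized RF is an assumption here, since that step falls outside the visible hunk:

```Go
// rfAdd mirrors the accumulation in RF.Add above, on plain 2D slices.
// Source values below thr are skipped; each remaining source value
// weights the full activity pattern.
func rfAdd(sumProd [][]float32, sumSrc, act, src []float32, thr float32) {
	for s, sv := range src {
		if sv < thr {
			continue // threshold gates out weak source activity
		}
		sumSrc[s] += sv
		for a, av := range act {
			sumProd[a][s] += av * sv // activity-weighted outer product
		}
	}
}
```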
4 changes: 2 additions & 2 deletions actrf/running.go
@@ -20,10 +20,10 @@ func RunningAvg(out *etensor.Float32, act, src etensor.Tensor, tau float32) {
out.SetShape(oshp, nil, []string{"ActY", "ActX", "SrcY", "SrcX"})
for ay := 0; ay < aNy; ay++ {
for ax := 0; ax < aNx; ax++ {
- av := float32(etensor.Prjn2DVal(act, false, ay, ax))
+ av := float32(etensor.Prjn2DValue(act, false, ay, ax))
for ty := 0; ty < tNy; ty++ {
for tx := 0; tx < tNx; tx++ {
- tv := float32(etensor.Prjn2DVal(src, false, ty, tx))
+ tv := float32(etensor.Prjn2DValue(src, false, ty, tx))
oi := []int{ay, ax, ty, tx}
oo := out.Offset(oi)
ov := out.Values[oo]
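The hunk is cut off here. For context, running averages in this style typically apply an exponential update with rate 1/tau; a minimal sketch of that assumed form (not necessarily the exact tail of `RunningAvg`):

```Go
// runningAvg moves old toward target by 1/tau per call — the assumed
// exponential form of the update that RunningAvg applies to
// out.Values[oo] with target = av*tv.
func runningAvg(old, target, tau float32) float32 {
	return old + (target-old)/tau
}
```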
26 changes: 13 additions & 13 deletions confusion/confusion.go
@@ -126,16 +126,16 @@ func (cm *Matrix) SumTFPN(class int) {
for c := 0; c < n; c++ {
for r := 0; r < n; r++ {
if r == class && c == class { // True Positive
- v := cm.Sum.FloatValRowCell(r, c)
+ v := cm.Sum.FloatValueRowCell(r, c)
cm.TFPN.SetFloatRowCell(class, 0, v)
} else if r == class && c != class { // False Positive
- fn += cm.Sum.FloatValRowCell(r, c)
+ fn += cm.Sum.FloatValueRowCell(r, c)
cm.TFPN.SetFloatRowCell(class, 1, fp)
} else if r != class && c == class { // False Negative
- fp += cm.Sum.FloatValRowCell(r, c)
+ fp += cm.Sum.FloatValueRowCell(r, c)
cm.TFPN.SetFloatRowCell(class, 2, fn)
} else { // True Negative
- tn += cm.Sum.FloatValRowCell(r, c)
+ tn += cm.Sum.FloatValueRowCell(r, c)
cm.TFPN.SetFloatRowCell(class, 3, tn)
}
}
@@ -146,9 +146,9 @@ func (cm *Matrix) SumTFPN(class int) {
}

func (cm *Matrix) ScoreClass(class int) {
- tp := cm.TFPN.FloatValRowCell(class, 0)
- fp := cm.TFPN.FloatValRowCell(class, 1)
- fn := cm.TFPN.FloatValRowCell(class, 2)
+ tp := cm.TFPN.FloatValueRowCell(class, 0)
+ fp := cm.TFPN.FloatValueRowCell(class, 1)
+ fn := cm.TFPN.FloatValueRowCell(class, 2)

precision := tp / (tp + fp)
cm.ClassScores.SetFloatRowCell(class, 0, precision)
@@ -165,9 +165,9 @@ func (cm *Matrix) ScoreMatrix() {

n := cm.N.Len()
for i := 0; i < n; i++ {
- tp += cm.TFPN.FloatValRowCell(i, 0)
- fp += cm.TFPN.FloatValRowCell(i, 1)
- fn += cm.TFPN.FloatValRowCell(i, 2)
+ tp += cm.TFPN.FloatValueRowCell(i, 0)
+ fp += cm.TFPN.FloatValueRowCell(i, 1)
+ fn += cm.TFPN.FloatValueRowCell(i, 2)
}

// micro F1 - ignores class
@@ -178,7 +178,7 @@ func (cm *Matrix) ScoreMatrix() {
// some classes might not have any instances so check NaN
f1 = 0.0
for i := 0; i < n; i++ {
- classf1 := cm.ClassScores.FloatValRowCell(i, 2)
+ classf1 := cm.ClassScores.FloatValueRowCell(i, 2)
if math.IsNaN(classf1) == false {
f1 += classf1
}
@@ -190,11 +190,11 @@ func (cm *Matrix) ScoreMatrix() {
f1 = 0.0
totalN := 0.0
for i := 0; i < n; i++ {
- classf1 := cm.ClassScores.FloatValRowCell(i, 2) * cm.N.FloatVal1D(i)
+ classf1 := cm.ClassScores.FloatValueRowCell(i, 2) * cm.N.FloatValue1D(i)
if math.IsNaN(classf1) == false {
f1 += classf1
}
- totalN += cm.N.FloatVal1D(i)
+ totalN += cm.N.FloatValue1D(i)
}
cm.MatrixScores.SetFloat1D(2, f1/totalN)
}
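For reference, these hunks implement standard F1 scoring: precision = tp/(tp+fp), recall = tp/(tp+fn), F1 = 2pr/(p+r). Micro F1 pools tp/fp/fn across classes before scoring; macro F1 averages per-class F1, skipping NaN for classes with no instances; the weighted variant scales per-class F1 by N. A self-contained sketch of the micro/macro logic, independent of the etable types:

```Go
package main

import (
	"fmt"
	"math"
)

// f1 computes precision, recall, and F1 from true-positive,
// false-positive, and false-negative counts.
func f1(tp, fp, fn float64) (p, r, f float64) {
	p = tp / (tp + fp)
	r = tp / (tp + fn)
	f = 2 * p * r / (p + r)
	return
}

func main() {
	tps := []float64{8, 0, 5} // per-class true positives
	fps := []float64{1, 0, 2} // per-class false positives
	fns := []float64{2, 0, 1} // per-class false negatives
	var tp, fp, fn, macro, n float64
	for i := range tps {
		tp, fp, fn = tp+tps[i], fp+fps[i], fn+fns[i]
		if _, _, f := f1(tps[i], fps[i], fns[i]); !math.IsNaN(f) {
			macro += f // skip classes with no instances (NaN), as above
			n++
		}
	}
	_, _, micro := f1(tp, fp, fn) // micro: pooled counts, ignores class
	fmt.Printf("micro F1 = %.3f  macro F1 = %.3f\n", micro, macro/n)
}
```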
4 changes: 2 additions & 2 deletions decoder/gtigen.go

Some generated files are not rendered by default.

20 changes: 10 additions & 10 deletions decoder/linear.go
@@ -40,7 +40,7 @@ type Linear struct {
Inputs []float32

// for holding layer values
- ValsTsrs map[string]*etensor.Float32 `view:"-"`
+ ValuesTsrs map[string]*etensor.Float32 `view:"-"`

// synaptic weights: outer loop is units, inner loop is inputs
Weights etensor.Float32
@@ -61,7 +61,7 @@ type Linear struct {
// Layer is the subset of emer.Layer that is used by this code
type Layer interface {
Name() string
- UnitValsTensor(tsr etensor.Tensor, varNm string, di int) error
+ UnitValuesTensor(tsr etensor.Tensor, varNm string, di int) error
Shape() *etensor.Shape
}

@@ -182,15 +182,15 @@ func (dec *Linear) SetTargets(targs []float32) error {
return nil
}

- // ValsTsr gets value tensor of given name, creating if not yet made
- func (dec *Linear) ValsTsr(name string) *etensor.Float32 {
- if dec.ValsTsrs == nil {
- dec.ValsTsrs = make(map[string]*etensor.Float32)
+ // ValuesTsr gets value tensor of given name, creating if not yet made
+ func (dec *Linear) ValuesTsr(name string) *etensor.Float32 {
+ if dec.ValuesTsrs == nil {
+ dec.ValuesTsrs = make(map[string]*etensor.Float32)
}
- tsr, ok := dec.ValsTsrs[name]
+ tsr, ok := dec.ValuesTsrs[name]
if !ok {
tsr = &etensor.Float32{}
- dec.ValsTsrs[name] = tsr
+ dec.ValuesTsrs[name] = tsr
}
return tsr
}
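`ValuesTsr` is a small lazily-initialized cache: one reusable tensor per layer name, so repeated per-trial reads don't reallocate. The same idiom appears in `decoder/softmax.go` below. A standalone sketch on plain slices (names here are illustrative, not part of the decoder API):

```Go
// tsrCache lazily creates and reuses one buffer per name, mirroring the
// ValuesTsrs map pattern in the Linear and SoftMax decoders.
type tsrCache struct {
	tsrs map[string][]float32
}

// get returns the buffer for name, creating it (with length n) on
// first use so later calls reuse the same allocation.
func (c *tsrCache) get(name string, n int) []float32 {
	if c.tsrs == nil {
		c.tsrs = make(map[string][]float32)
	}
	t, ok := c.tsrs[name]
	if !ok {
		t = make([]float32, n)
		c.tsrs[name] = t
	}
	return t
}
```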
@@ -201,8 +201,8 @@ func (dec *Linear) ValsTsr(name string) *etensor.Float32 {
func (dec *Linear) Input(varNm string, di int) {
off := 0
for _, ly := range dec.Layers {
- tsr := dec.ValsTsr(ly.Name())
- ly.UnitValsTensor(tsr, varNm, di)
+ tsr := dec.ValuesTsr(ly.Name())
+ ly.UnitValuesTensor(tsr, varNm, di)
if dec.PoolIndex >= 0 {
shape := ly.Shape()
y := dec.PoolIndex / shape.Dim(1)
2 changes: 1 addition & 1 deletion decoder/linear_test.go
@@ -21,7 +21,7 @@ func (tl *TestLayer) Name() string {
return "TestLayer"
}

- func (tl *TestLayer) UnitValsTensor(tsr etensor.Tensor, varNm string, di int) error {
+ func (tl *TestLayer) UnitValuesTensor(tsr etensor.Tensor, varNm string, di int) error {
src, ok := tl.tensors[varNm]
if !ok {
return fmt.Errorf("bad key: %s", varNm)
18 changes: 9 additions & 9 deletions decoder/softmax.go
@@ -50,7 +50,7 @@ type SoftMax struct {
Target int

// for holding layer values
- ValsTsrs map[string]*etensor.Float32 `view:"-"`
+ ValuesTsrs map[string]*etensor.Float32 `view:"-"`

// synaptic weights: outer loop is units, inner loop is inputs
Weights etensor.Float32
@@ -124,15 +124,15 @@ func (sm *SoftMax) TrainMPI(targ int) {
sm.BackMPI()
}

- // ValsTsr gets value tensor of given name, creating if not yet made
- func (sm *SoftMax) ValsTsr(name string) *etensor.Float32 {
- if sm.ValsTsrs == nil {
- sm.ValsTsrs = make(map[string]*etensor.Float32)
+ // ValuesTsr gets value tensor of given name, creating if not yet made
+ func (sm *SoftMax) ValuesTsr(name string) *etensor.Float32 {
+ if sm.ValuesTsrs == nil {
+ sm.ValuesTsrs = make(map[string]*etensor.Float32)
}
- tsr, ok := sm.ValsTsrs[name]
+ tsr, ok := sm.ValuesTsrs[name]
if !ok {
tsr = &etensor.Float32{}
- sm.ValsTsrs[name] = tsr
+ sm.ValuesTsrs[name] = tsr
}
return tsr
}
@@ -143,8 +143,8 @@ func (sm *SoftMax) ValsTsr(name string) *etensor.Float32 {
func (sm *SoftMax) Input(varNm string, di int) {
off := 0
for _, ly := range sm.Layers {
- tsr := sm.ValsTsr(ly.Name())
- ly.UnitValsTensor(tsr, varNm, di)
+ tsr := sm.ValuesTsr(ly.Name())
+ ly.UnitValuesTensor(tsr, varNm, di)
for j, v := range tsr.Values {
sm.Inputs[off+j] = v
}
2 changes: 1 addition & 1 deletion egui/gtigen.go

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions egui/gui.go
@@ -41,7 +41,7 @@ type GUI struct {
Grids map[string]*etview.TensorGrid

// the view update for managing updates of netview
- ViewUpdt *netview.ViewUpdt `view:"-"`
+ ViewUpdate *netview.ViewUpdate `view:"-"`

// net data for recording in nogui mode, if !nil
NetData *netview.NetData `view:"-"`
@@ -90,7 +90,7 @@ func (gui *GUI) Stopped() {
if gui.Body == nil {
return
}
- if gui.ViewUpdt != nil {
+ if gui.ViewUpdate != nil {
gui.UpdateNetViewWhenStopped()
}
gui.GoUpdateWindow()
8 changes: 4 additions & 4 deletions egui/netview.go
@@ -12,16 +12,16 @@ import (

// UpdateNetView updates the gui visualization of the network.
func (gui *GUI) UpdateNetView() {
- if gui.ViewUpdt != nil {
- gui.ViewUpdt.Update()
+ if gui.ViewUpdate != nil {
+ gui.ViewUpdate.Update()
}
}

// UpdateNetViewWhenStopped updates the gui visualization of the network.
// when stopped either via stepping or user hitting stop button.
func (gui *GUI) UpdateNetViewWhenStopped() {
- if gui.ViewUpdt != nil {
- gui.ViewUpdt.UpdateWhenStopped()
+ if gui.ViewUpdate != nil {
+ gui.ViewUpdate.UpdateWhenStopped()
}
}

2 changes: 1 addition & 1 deletion egui/plots.go
@@ -55,7 +55,7 @@ func ConfigPlotFromLog(title string, plt *eplot.Plot2D, lg *elog.Logs, key etime
if item.Color != "" {
cp.Color = grr.Log1(colors.FromString(item.Color, nil))
}
- cp.TensorIdx = item.TensorIdx
+ cp.TensorIndex = item.TensorIndex
cp.ErrCol = item.ErrCol

plt.Params.Title = title + " " + time + " Plot"
10 changes: 5 additions & 5 deletions elog/README.md
@@ -132,7 +132,7 @@ There are various additional analysis functions called here, for example this one
func (ss *Sim) LogRunStats() {
sk := etime.Scope(etime.Train, etime.Run)
lt := ss.Logs.TableDetailsScope(sk)
- ix, _ := lt.NamedIdxView("RunStats")
+ ix, _ := lt.NamedIndexView("RunStats")

spl := split.GroupBy(ix, []string{"Params"})
split.Desc(spl, "FirstZero")
@@ -308,7 +308,7 @@ Computing stats on the principal components of variance (PCA) across different i
// PCAStats computes PCA statistics on recorded hidden activation patterns
// from Analyze, Trial log data
func (ss *Sim) PCAStats() {
- ss.Stats.PCAStats(ss.Logs.IdxView(etime.Analyze, etime.Trial), "ActM", ss.Net.LayersByClass("Hidden"))
+ ss.Stats.PCAStats(ss.Logs.IndexView(etime.Analyze, etime.Trial), "ActM", ss.Net.LayersByClass("Hidden"))
ss.Logs.ResetLog(etime.Analyze, etime.Trial)
}
```
@@ -348,7 +348,7 @@ Here's how you record the data and log the resulting stats, using the `Analyze`
## Error by Input Category
- This item creates a tensor column that records the average error for each category of input stimulus (e.g., for images from object categories), using the `split.GroupBy` function for `etable`. The `IdxView` function (see also `NamedIdxView`) automatically manages the `etable.IdxView` indexed view onto a log table, which is used for all aggregation and further analysis of data, so that you can efficiently analyze filtered subsets of the original data.
+ This item creates a tensor column that records the average error for each category of input stimulus (e.g., for images from object categories), using the `split.GroupBy` function for `etable`. The `IndexView` function (see also `NamedIndexView`) automatically manages the `etable.IndexView` indexed view onto a log table, which is used for all aggregation and further analysis of data, so that you can efficiently analyze filtered subsets of the original data.
```Go
ss.Logs.AddItem(&elog.Item{
@@ -358,10 +358,10 @@ This item creates a tensor column that records the average error for each catego
DimNames: []string{"Cat"},
Plot: true,
Range: minmax.F64{Min: 0},
- TensorIdx: -1, // plot all values
+ TensorIndex: -1, // plot all values
Write: elog.WriteMap{
etime.Scope(etime.Test, etime.Epoch): func(ctx *elog.Context) {
- ix := ctx.Logs.IdxView(etime.Test, etime.Trial)
+ ix := ctx.Logs.IndexView(etime.Test, etime.Trial)
spl := split.GroupBy(ix, []string{"Cat"})
split.AggTry(spl, "Err", agg.AggMean)
cats := spl.AggsToTable(etable.ColNameOnly)
