Skip to content

Commit

Permalink
significant API change: added data parallel index to Unit value metho…
Browse files Browse the repository at this point in the history
…ds on emer.Layer, and added support for selecting which data index to view in NetView -- will require Leabra updates
  • Loading branch information
rcoreilly committed May 25, 2023
1 parent 8a0e781 commit 3adbf34
Show file tree
Hide file tree
Showing 12 changed files with 127 additions and 68 deletions.
2 changes: 1 addition & 1 deletion decoder/softmax.go
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ func (sm *SoftMax) Input(varNm string) {
off := 0
for _, ly := range sm.Layers {
tsr := sm.ValsTsr(ly.Name())
ly.UnitValsTensor(tsr, varNm)
ly.UnitValsTensor(tsr, varNm, 0)
for j, v := range tsr.Values {
sm.Inputs[off+j] = v
}
Expand Down
2 changes: 1 addition & 1 deletion egui/netview.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ func (gui *GUI) UpdateNetViewWhenStopped() {
// when the GUI is not active
func (gui *GUI) InitNetData(net emer.Network, nrecs int) {
gui.NetData = &netview.NetData{}
gui.NetData.Init(net, nrecs, true) // true = NoSynData
gui.NetData.Init(net, nrecs, true, 1) // true = NoSynData, 1 = MaxData
}

// NetDataRecord records current netview data
Expand Down
22 changes: 12 additions & 10 deletions elog/context.go
Original file line number Diff line number Diff line change
Expand Up @@ -217,31 +217,33 @@ func (ctx *Context) Layer(layNm string) emer.Layer {
}

// GetLayerTensor gets tensor of Unit values on a layer for given variable
func (ctx *Context) GetLayerTensor(layNm, unitVar string) *etensor.Float32 {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (ctx *Context) GetLayerTensor(layNm, unitVar string, di int) *etensor.Float32 {
ly := ctx.Layer(layNm)
tsr := ctx.Stats.F32Tensor(layNm)
ly.UnitValsTensor(tsr, unitVar)
ly.UnitValsTensor(tsr, unitVar, di)
return tsr
}

// GetLayerRepTensor gets tensor of representative Unit values on a layer for given variable
func (ctx *Context) GetLayerRepTensor(layNm, unitVar string) *etensor.Float32 {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (ctx *Context) GetLayerRepTensor(layNm, unitVar string, di int) *etensor.Float32 {
ly := ctx.Layer(layNm)
tsr := ctx.Stats.F32Tensor(layNm)
ly.UnitValsRepTensor(tsr, unitVar)
ly.UnitValsRepTensor(tsr, unitVar, di)
return tsr
}

// SetLayerTensor sets tensor of Unit values on a layer for given variable
func (ctx *Context) SetLayerTensor(layNm, unitVar string) *etensor.Float32 {
tsr := ctx.GetLayerTensor(layNm, unitVar)
func (ctx *Context) SetLayerTensor(layNm, unitVar string, di int) *etensor.Float32 {
tsr := ctx.GetLayerTensor(layNm, unitVar, di)
ctx.SetTensor(tsr)
return tsr
}

// SetLayerRepTensor sets tensor of representative Unit values on a layer for given variable
func (ctx *Context) SetLayerRepTensor(layNm, unitVar string) *etensor.Float32 {
tsr := ctx.GetLayerRepTensor(layNm, unitVar)
func (ctx *Context) SetLayerRepTensor(layNm, unitVar string, di int) *etensor.Float32 {
tsr := ctx.GetLayerRepTensor(layNm, unitVar, di)
ctx.SetTensor(tsr)
return tsr
}
Expand All @@ -250,8 +252,8 @@ func (ctx *Context) SetLayerRepTensor(layNm, unitVar string) *etensor.Float32 {
// given layer activation pattern using given variable. Returns the row number,
// correlation value, and value of a column named namecol for that row if non-empty.
// Column must be etensor.Float32
func (ctx *Context) ClosestPat(layNm, unitVar string, pats *etable.Table, colnm, namecol string) (int, float32, string) {
tsr := ctx.SetLayerTensor(layNm, unitVar)
func (ctx *Context) ClosestPat(layNm, unitVar string, di int, pats *etable.Table, colnm, namecol string) (int, float32, string) {
tsr := ctx.SetLayerTensor(layNm, unitVar, di)
col := pats.ColByName(colnm)
// note: requires Increasing metric so using Inv
row, cor := metric.ClosestRow32(tsr, col.(*etensor.Float32), metric.InvCorrelation32)
Expand Down
7 changes: 4 additions & 3 deletions elog/stditems.go
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,8 @@ func (lg *Logs) RunStats(stats ...string) {
// classes of layers, mode and time (e.g., Test, Trial).
// If another item already exists for a different mode / time, this is added
// to it so there aren't any duplicate items.
func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, mode etime.Modes, etm etime.Times, layClasses ...string) {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, di int, mode etime.Modes, etm etime.Times, layClasses ...string) {
layers := net.LayersByClass(layClasses...)
for _, lnm := range layers {
clnm := lnm
Expand All @@ -341,7 +342,7 @@ func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, mode etime.M
itm, has := lg.ItemByName(itmNm)
if has {
itm.Write[etime.Scope(mode, etm)] = func(ctx *Context) {
ctx.SetLayerRepTensor(clnm, varNm)
ctx.SetLayerRepTensor(clnm, varNm, di)
}
} else {
lg.AddItem(&Item{
Expand All @@ -352,7 +353,7 @@ func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, mode etime.M
Range: minmax.F64{Max: 1},
Write: WriteMap{
etime.Scope(mode, etm): func(ctx *Context) {
ctx.SetLayerRepTensor(clnm, varNm)
ctx.SetLayerRepTensor(clnm, varNm, di)
}}})
}
}
Expand Down
18 changes: 12 additions & 6 deletions emer/layer.go
Original file line number Diff line number Diff line change
Expand Up @@ -129,26 +129,31 @@ type Layer interface {
// for this layer. This is needed for extending indexes in derived types.
UnitVarNum() int

// UnitVal1D returns value of given variable index on given unit, using 1-dimensional index.
// UnitVal1D returns value of given variable index on given unit,
// using 1-dimensional index, and a data parallel index di,
// for networks capable of processing multiple input patterns in parallel.
// returns NaN on invalid index.
// This is the core unit var access method used by other methods,
// so it is the only one that needs to be updated for derived layer types.
UnitVal1D(varIdx int, idx int) float32
UnitVal1D(varIdx int, idx, di int) float32

// UnitVals fills in values of given variable name on unit,
// for each unit in the layer, into given float32 slice (only resized if not big enough).
// di is a data parallel index, for networks capable of processing input patterns in parallel.
// Returns error on invalid var name.
UnitVals(vals *[]float32, varNm string) error
UnitVals(vals *[]float32, varNm string, di int) error

// UnitValsTensor fills in values of given variable name on unit
// for each unit in the layer, into given tensor.
// di is a data parallel index, for networks capable of processing input patterns in parallel.
// If tensor is not already big enough to hold the values, it is
// set to the same shape as the layer.
// Returns error on invalid var name.
UnitValsTensor(tsr etensor.Tensor, varNm string) error
UnitValsTensor(tsr etensor.Tensor, varNm string, di int) error

// UnitValsRepTensor fills in values of given variable name on unit
// for a smaller subset of representative units in the layer, into given tensor.
// di is a data parallel index, for networks capable of processing input patterns in parallel.
// This is used for computationally intensive stats or displays that work
// much better with a smaller number of units.
// The set of representative units are defined by SetRepIdxs -- all units
Expand All @@ -157,7 +162,7 @@ type Layer interface {
// set to RepShape to hold all the values if subset is defined,
// otherwise it calls UnitValsTensor and is identical to that.
// Returns error on invalid var name.
UnitValsRepTensor(tsr etensor.Tensor, varNm string) error
UnitValsRepTensor(tsr etensor.Tensor, varNm string, di int) error

// RepIdxs returns the current set of representative unit indexes.
// which are a smaller subset of units that represent the behavior
Expand All @@ -182,7 +187,8 @@ type Layer interface {
// UnitVal returns value of given variable name on given unit,
// using shape-based dimensional index.
// Returns NaN on invalid var name or index.
UnitVal(varNm string, idx []int) float32
// di is a data parallel index, for networks capable of processing input patterns in parallel.
UnitVal(varNm string, idx []int, di int) float32

// NRecvPrjns returns the number of receiving projections
NRecvPrjns() int
Expand Down
14 changes: 8 additions & 6 deletions estats/actrf.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ import (
// and 'Source' is either the name of another layer (checked first)
// or the name of a tensor stored in F32Tensors (if layer name not found).
// If Source is not a layer, it must be populated prior to these calls.
func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string, di int) error {
var err error
for _, anm := range arfs {
sp := strings.Split(anm, ":")
Expand All @@ -32,12 +33,12 @@ func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error
fmt.Printf("estats.InitActRFs: %s\n", err)
continue
}
lvt := st.SetLayerRepTensor(net, lnm, varnm)
lvt := st.SetLayerRepTensor(net, lnm, varnm, di)
tnm := sp[1]
var tvt *etensor.Float32
_, err = net.LayerByNameTry(tnm)
if err == nil {
tvt = st.SetLayerRepTensor(net, tnm, varnm)
tvt = st.SetLayerRepTensor(net, tnm, varnm, di)
} else {
ok := false
tvt, ok = st.F32Tensors[tnm]
Expand All @@ -58,7 +59,8 @@ func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error
// Must have called InitActRFs first -- see it for documentation.
// Uses RFs configured then, grabbing network values from variable
// varnm, and given threshold (0.01 recommended)
func (st *Stats) UpdateActRFs(net emer.Network, varnm string, thr float32) {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) UpdateActRFs(net emer.Network, varnm string, thr float32, di int) {
for _, rf := range st.ActRFs.RFs {
anm := rf.Name
sp := strings.Split(anm, ":")
Expand All @@ -67,12 +69,12 @@ func (st *Stats) UpdateActRFs(net emer.Network, varnm string, thr float32) {
if err != nil {
continue
}
lvt := st.SetLayerRepTensor(net, lnm, varnm)
lvt := st.SetLayerRepTensor(net, lnm, varnm, di)
tnm := sp[1]
var tvt *etensor.Float32
_, err = net.LayerByNameTry(tnm)
if err == nil {
tvt = st.SetLayerRepTensor(net, tnm, varnm)
tvt = st.SetLayerRepTensor(net, tnm, varnm, di)
} else { // random state
tvt = st.F32Tensor(tnm)
}
Expand Down
29 changes: 17 additions & 12 deletions estats/funcs.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,49 +16,54 @@ import (

// SetLayerTensor sets tensor of Unit values on a layer for given variable
// to a F32Tensor with name = layNm
func (st *Stats) SetLayerTensor(net emer.Network, layNm, unitVar string) *etensor.Float32 {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) SetLayerTensor(net emer.Network, layNm, unitVar string, di int) *etensor.Float32 {
ly := net.LayerByName(layNm)
tsr := st.F32Tensor(layNm)
ly.UnitValsTensor(tsr, unitVar)
ly.UnitValsTensor(tsr, unitVar, di)
return tsr
}

// SetLayerRepTensor sets tensor of representative Unit values on a layer
// for given variable to a F32Tensor with name = layNm
func (st *Stats) SetLayerRepTensor(net emer.Network, layNm, unitVar string) *etensor.Float32 {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) SetLayerRepTensor(net emer.Network, layNm, unitVar string, di int) *etensor.Float32 {
ly := net.LayerByName(layNm)
tsr := st.F32Tensor(layNm)
ly.UnitValsRepTensor(tsr, unitVar)
ly.UnitValsRepTensor(tsr, unitVar, di)
return tsr
}

// LayerVarsCorrel returns the correlation between two variables on a given layer
func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB string) float32 {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB string, di int) float32 {
ly := net.LayerByName(layNm)
tsrA := st.F32Tensor(layNm) // standard re-used storage tensor
ly.UnitValsTensor(tsrA, unitVarA)
ly.UnitValsTensor(tsrA, unitVarA, di)
tsrB := st.F32Tensor(layNm + "_alt") // alternative storage tensor
ly.UnitValsTensor(tsrB, unitVarB)
ly.UnitValsTensor(tsrB, unitVarB, di)
return metric.Correlation32(tsrA.Values, tsrB.Values)
}

// LayerVarsCorrelRep returns the correlation between two variables on a given layer
// Rep version uses representative units.
func (st *Stats) LayerVarsCorrelRep(net emer.Network, layNm, unitVarA, unitVarB string) float32 {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) LayerVarsCorrelRep(net emer.Network, layNm, unitVarA, unitVarB string, di int) float32 {
ly := net.LayerByName(layNm)
tsrA := st.F32Tensor(layNm) // standard re-used storage tensor
ly.UnitValsRepTensor(tsrA, unitVarA)
ly.UnitValsRepTensor(tsrA, unitVarA, di)
tsrB := st.F32Tensor(layNm + "_alt") // alternative storage tensor
ly.UnitValsRepTensor(tsrB, unitVarB)
ly.UnitValsRepTensor(tsrB, unitVarB, di)
return metric.Correlation32(tsrA.Values, tsrB.Values)
}

// ClosestStat finds the closest pattern in given column of given table of possible patterns,
// compared to layer activation pattern using given variable. Returns the row number,
// correlation value, and value of a column named namecol for that row if non-empty.
// Column must be etensor.Float32
func (st *Stats) ClosestPat(net emer.Network, layNm, unitVar string, pats *etable.Table, colnm, namecol string) (int, float32, string) {
tsr := st.SetLayerTensor(net, layNm, unitVar)
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) ClosestPat(net emer.Network, layNm, unitVar string, di int, pats *etable.Table, colnm, namecol string) (int, float32, string) {
tsr := st.SetLayerTensor(net, layNm, unitVar, di)
col := pats.ColByName(colnm)
// note: requires Increasing metric so using Inv
row, cor := metric.ClosestRow32(tsr, col.(*etensor.Float32), metric.InvCorrelation32)
Expand Down
5 changes: 3 additions & 2 deletions estats/rasters.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,10 @@ func (st *Stats) SetRasterCol(sr, tsr *etensor.Float32, col int) {

// RasterRec records data from layers configured with ConfigRasters
// using variable name, for given cycle number (X axis index)
func (st *Stats) RasterRec(net emer.Network, cyc int, varNm string) {
// di is a data parallel index, for networks capable of processing input patterns in parallel.
func (st *Stats) RasterRec(net emer.Network, cyc int, varNm string, di int) {
for _, lnm := range st.Rasters {
tsr := st.SetLayerRepTensor(net, lnm, varNm)
tsr := st.SetLayerRepTensor(net, lnm, varNm, di)
sr := st.F32Tensor("Raster_" + lnm)
if sr.Dim(1) <= cyc {
continue
Expand Down

0 comments on commit 3adbf34

Please sign in to comment.