
pt -> Pulv connections, added PT return layer to relevant functions; optimized pfcmaint params -- working very well now. all the other projects need significant updating.
rcoreilly committed Apr 3, 2024
1 parent 5ebdca5 commit 2a49099
Showing 8 changed files with 105 additions and 76 deletions.
10 changes: 6 additions & 4 deletions axon/act.go
@@ -533,18 +533,20 @@ type SMaintParams struct {
 	NNeurons float32 `default:"10"`
 
 	// conductance multiplier for self maintenance synapses
-	Gbar float32 `default:"1"`
+	Gbar float32 `default:"0.2"`
 
-	pad float32
+	// inhib controls how much of the extra maintenance conductance goes to the GeExt, which drives extra proportional inhibition
+	Inhib float32 `default:"1"`
 
 	// ISI (inter spike interval) range -- min is used as min ISI for poisson spike rate expected from the population, and above max, no additional maintenance conductance is added
 	ISI minmax.F32 `view:"inline"`
 }
 
 func (sm *SMaintParams) Defaults() {
 	sm.NNeurons = 10
-	sm.ISI.Set(3, 20)
-	sm.Gbar = 1
+	sm.ISI.Set(1, 20)
+	sm.Gbar = 0.2
+	sm.Inhib = 1
 }
 
 func (sm *SMaintParams) Update() {
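Note on this act.go change: the new Inhib field pairs self-maintenance excitation with proportional inhibition -- as the layerparams.go hunks below show, SpecialPreGs saves Inhib * GMaintRaw and SpecialPostGs writes it to nrn.GeExt. A toy fragment tracing that arithmetic, written as if inside package axon, with made-up numbers:

func exampleSMaintInhib() {
	sm := SMaintParams{}
	sm.Defaults() // NNeurons=10, ISI=[1,20], Gbar=0.2, Inhib=1
	gMaintRaw := float32(0.5) // hypothetical raw self-maintenance conductance
	// Mirrors SpecialPreGs/SpecialPostGs below: the Inhib fraction of
	// GMaintRaw ends up in GeExt, so stronger maintenance drive recruits
	// proportionally more inhibition.
	geExt := sm.Inhib * gMaintRaw
	_ = geExt // here: 1 * 0.5 = 0.5
}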
6 changes: 3 additions & 3 deletions axon/deep_layers.go
@@ -183,11 +183,11 @@ func (ly *Layer) PTMaintDefaults() {
 	ly.Params.Acts.Decay.AHP = 0
 	ly.Params.Acts.Decay.OnRew.SetBool(true)
 	ly.Params.Acts.Sahp.Gbar = 0.01  // not much pressure -- long maint
-	ly.Params.Acts.GabaB.Gbar = 0.01 // needed for cons
+	ly.Params.Acts.GabaB.Gbar = 0.01 // needed for cons, good for smaint
 	ly.Params.Acts.Dend.ModGain = 1.5
-	ly.Params.Inhib.ActAvg.Nominal = 0.3 // very active
+	// ly.Params.Inhib.ActAvg.Nominal = 0.1 // normal
 	if ly.Is4D() {
-		ly.Params.Inhib.ActAvg.Nominal = 0.05
+		ly.Params.Inhib.ActAvg.Nominal = 0.02
 	}
 	ly.Params.Inhib.Layer.Gi = 2.4
 	ly.Params.Inhib.Pool.Gi = 2.4
35 changes: 21 additions & 14 deletions axon/deep_net.go
@@ -248,13 +248,13 @@ func (net *Network) AddPTMaintThalForSuper(super, ct *Layer, thalSuffix, prjnCla
 	}
 	name := super.Name()
 	shp := super.Shape()
-	// is4D := false
+	is4D := false
 	ptExtra := 1 // extra size for pt layers
 	if shp.NumDims() == 2 {
 		pt = net.AddPTMaintLayer2D(name+"PT", shp.Dim(0)*ptExtra, shp.Dim(1)*ptExtra)
 		thal = net.AddBGThalLayer2D(name+thalSuffix, shp.Dim(0), shp.Dim(1))
 	} else {
-		// is4D = true
+		is4D = true
 		pt = net.AddPTMaintLayer4D(name+"PT", shp.Dim(0), shp.Dim(1), shp.Dim(2)*ptExtra, shp.Dim(3)*ptExtra)
 		thal = net.AddBGThalLayer4D(name+thalSuffix, shp.Dim(0), shp.Dim(1), shp.Dim(2), shp.Dim(3))
 	}
@@ -267,6 +267,9 @@ func (net *Network) AddPTMaintThalForSuper(super, ct *Layer, thalSuffix, prjnCla
 			"Layer.Inhib.Layer.Gi": "2",
 			"Layer.Inhib.Pool.Gi":  "2",
 		}
+		if is4D {
+			pt.DefParams["Layer.Inhib.Pool.On"] = "true"
+		}
 	}
 
 	pthal, thalpt := net.BidirConnectLayers(pt, thal, ptThal)
@@ -360,17 +363,21 @@ func (net *Network) ConnectPTPredSelf(ly *Layer, pat prjn.Pattern) *Prjn {
 	return net.LateralConnectLayer(ly, pat).SetClass("PTSelfMaint").(AxonPrjn).AsAxon()
 }
 
-// ConnectPTPredToPulv connects PTPred with given Pulv: PTPred -> Pulv is class PTPredToPulv,
+// ConnectPTToPulv connects PT, PTPred with given Pulv:
+// PT -> Pulv is class PTToPulv; PT does NOT receive back from Pulv
+// PTPred -> Pulv is class PTPredToPulv,
 // From Pulv = type = Back, class = FmPulv
-// toPulvPat is the prjn.Pattern PTPred -> Pulv and fmPulvPat is Pulv -> PTPred
+// toPulvPat is the prjn.Pattern PT -> Pulv and fmPulvPat is Pulv -> PTPred
 // Typically Pulv is a different shape than PTPred, so use Full or appropriate
 // topological pattern. adds optional class name to projection.
-func (net *Network) ConnectPTPredToPulv(ptPred, pulv *Layer, toPulvPat, fmPulvPat prjn.Pattern, prjnClass string) (toPulv, toPTPred *Prjn) {
+func (net *Network) ConnectPTToPulv(pt, ptPred, pulv *Layer, toPulvPat, fmPulvPat prjn.Pattern, prjnClass string) (ptToPulv, ptPredToPulv, toPTPred *Prjn) {
 	if prjnClass != "" {
 		prjnClass = " " + prjnClass
 	}
-	toPulv = net.ConnectLayers(ptPred, pulv, toPulvPat, ForwardPrjn)
-	toPulv.SetClass("PTPredToPulv" + prjnClass)
+	ptToPulv = net.ConnectLayers(pt, pulv, toPulvPat, ForwardPrjn)
+	ptToPulv.SetClass("PTToPulv" + prjnClass)
+	ptPredToPulv = net.ConnectLayers(ptPred, pulv, toPulvPat, ForwardPrjn)
+	ptPredToPulv.SetClass("PTPredToPulv" + prjnClass)
 	toPTPred = net.ConnectLayers(pulv, ptPred, fmPulvPat, BackPrjn)
 	toPTPred.SetClass("FmPulv" + prjnClass)
 	return
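A hedged usage sketch of the new ConnectPTToPulv signature, written as if inside package axon; pfcPT, pfcPTp, and itemP are hypothetical layers:

func exampleConnectPTToPulv(net *Network, pfcPT, pfcPTp, itemP *Layer) {
	full := prjn.NewFull()
	// PT -> Pulv ("PTToPulv") and PTPred -> Pulv ("PTPredToPulv") go forward;
	// only PTPred receives the Back projection from Pulv ("FmPulv").
	ptToPulv, ptPredToPulv, toPTPred := net.ConnectPTToPulv(pfcPT, pfcPTp, itemP, full, full, "PFCPrjn")
	_, _, _ = ptToPulv, ptPredToPulv, toPTPred
}

Callers of the old ConnectPTPredToPulv(ptPred, pulv, ...) must now pass the PT layer as well -- hence the commit message's note that the other projects need significant updating.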
@@ -513,7 +520,7 @@ func (net *Network) AddPFC2D(name, thalSuffix string, nNeurY, nNeurX int, decayO
 	pfc.AddClass(layClass)
 	pfcCT.AddClass(layClass)
 	// prjns are: super->PT, PT self
-	pfcPT, pfcThal = net.AddPTMaintThalForSuper(pfc, pfcCT, thalSuffix, prjnClass, one2one, full, full, selfMaint, space)
+	pfcPT, pfcThal = net.AddPTMaintThalForSuper(pfc, pfcCT, thalSuffix, prjnClass, one2one, full, one2one, selfMaint, space)
 	pfcPTp = net.AddPTPredLayer(pfcPT, pfcCT, full, full, prjnClass, space)
 	pfcPTp.SetClass(name)
 	pfcPT.AddClass(layClass)
@@ -566,7 +573,7 @@ func (net *Network) AddPFC2D(name, thalSuffix string, nNeurY, nNeurX int, decayO
 // layP -> pfc, layP <-> pfcCT
 // pfcPTp <-> layP
 // sets PFCPrjn class name for projections
-func (net *Network) ConnectToPFC(lay, layP, pfc, pfcCT, pfcPTp *Layer, pat prjn.Pattern, prjnClass string) {
+func (net *Network) ConnectToPFC(lay, layP, pfc, pfcCT, pfcPT, pfcPTp *Layer, pat prjn.Pattern, prjnClass string) {
 	if prjnClass == "" {
 		prjnClass = "PFCPrjn"
 	}
@@ -579,22 +586,22 @@ func (net *Network) ConnectToPFC(lay, layP, pfc, pfcCT, pfcPTp *Layer, pat prjn.
 		pj.SetClass("ToPTp " + prjnClass)
 	}
 	net.ConnectToPulv(pfc, pfcCT, layP, pat, pat, prjnClass)
-	net.ConnectPTPredToPulv(pfcPTp, layP, pat, pat, prjnClass)
+	net.ConnectPTToPulv(pfcPT, pfcPTp, layP, pat, pat, prjnClass)
 }
 
 // ConnectToPFCBack connects given predictively learned input to all
 // relevant PFC layers:
 // lay -> pfc using a BackPrjn -- weaker
 // layP -> pfc, layP <-> pfcCT
 // pfcPTp <-> layP
-func (net *Network) ConnectToPFCBack(lay, layP, pfc, pfcCT, pfcPTp *Layer, pat prjn.Pattern, prjnClass string) {
+func (net *Network) ConnectToPFCBack(lay, layP, pfc, pfcCT, pfcPT, pfcPTp *Layer, pat prjn.Pattern, prjnClass string) {
 	if prjnClass == "" {
 		prjnClass = "PFCPrjn"
 	}
 	tp := net.ConnectLayers(lay, pfc, pat, BackPrjn)
 	tp.SetClass(prjnClass)
 	net.ConnectToPulv(pfc, pfcCT, layP, pat, pat, prjnClass)
-	net.ConnectPTPredToPulv(pfcPTp, layP, pat, pat, prjnClass)
+	net.ConnectPTToPulv(pfcPT, pfcPTp, layP, pat, pat, prjnClass)
 	pj := net.ConnectLayers(lay, pfcPTp, pat, ForwardPrjn) // ptp needs more input
 	pj.DefParams = params.Params{
 		"Prjn.PrjnScale.Abs": "4",
@@ -607,15 +614,15 @@ func (net *Network) ConnectToPFCBack(lay, layP, pfc, pfcCT, pfcPTp *Layer, pat p
 // lay <-> pfc bidirectional
 // layP -> pfc, layP <-> pfcCT
 // pfcPTp <-> layP
-func (net *Network) ConnectToPFCBidir(lay, layP, pfc, pfcCT, pfcPTp *Layer, pat prjn.Pattern, prjnClass string) (ff, fb *Prjn) {
+func (net *Network) ConnectToPFCBidir(lay, layP, pfc, pfcCT, pfcPT, pfcPTp *Layer, pat prjn.Pattern, prjnClass string) (ff, fb *Prjn) {
 	if prjnClass == "" {
 		prjnClass = "PFCPrjn"
 	}
 	ff, fb = net.BidirConnectLayers(lay, pfc, pat)
 	ff.SetClass(prjnClass)
 	fb.SetClass(prjnClass)
 	net.ConnectToPulv(pfc, pfcCT, layP, pat, pat, prjnClass)
-	net.ConnectPTPredToPulv(pfcPTp, layP, pat, pat, prjnClass)
+	net.ConnectPTToPulv(pfcPT, pfcPTp, layP, pat, pat, prjnClass)
 	pj := net.ConnectLayers(lay, pfcPTp, pat, ForwardPrjn) // ptp needs more input
 	pj.DefParams = params.Params{
 		"Prjn.PrjnScale.Abs": "4",
20 changes: 15 additions & 5 deletions axon/layerparams.go
@@ -480,6 +480,10 @@ func (ly *LayerParams) SpecialPreGs(ctx *Context, ni, di uint32, pl *Pool, vals
 		ctxExt := ly.Acts.Dt.GeSynFmRawSteady(geCtxt)
 		AddNrnV(ctx, ni, di, GeSyn, ctxExt)
 		saveVal = ctxExt // used In PostGs to set nrn.GeExt
+	case PTMaintLayer:
+		if ly.Acts.SMaint.On.IsTrue() {
+			saveVal = ly.Acts.SMaint.Inhib * NrnV(ctx, ni, di, GMaintRaw) // used In PostGs to set nrn.GeExt
+		}
 	case PulvinarLayer:
 		if ctx.PlusPhase.IsFalse() {
 			break
@@ -586,7 +590,9 @@ func (ly *LayerParams) SpecialPostGs(ctx *Context, ni, di uint32, saveVal float3
 	case BLALayer:
 		fallthrough
 	case CTLayer:
-		SetNrnV(ctx, ni, di, GeExt, saveVal)
+		fallthrough
+	case PTMaintLayer:
+		fallthrough
 	case PulvinarLayer:
 		SetNrnV(ctx, ni, di, GeExt, saveVal)
 	case PTPredLayer:
@@ -606,20 +612,24 @@ func (ly *LayerParams) GFmRawSyn(ctx *Context, ni, di uint32) {
 	extraSyn := float32(0)
 	nrnGModRaw := NrnV(ctx, ni, di, GModRaw)
 	nrnGModSyn := NrnV(ctx, ni, di, GModSyn)
+	ach := GlbV(ctx, di, GvACh)
 	switch ly.LayType {
 	case PTMaintLayer:
 		mod := ly.Acts.Dend.ModGain * nrnGModSyn
 		if ly.Acts.Dend.ModACh.IsTrue() {
-			mod *= GlbV(ctx, di, GvACh)
+			mod *= ach
 		}
 		mod += ly.Acts.Dend.ModBase
 		MulNrnV(ctx, ni, di, GeRaw, mod) // key: excluding GModMaint here, so active maintenance can persist
 		MulNrnV(ctx, ni, di, GeSyn, mod)
-		extraRaw = GlbV(ctx, di, GvACh) * ly.Acts.Dend.ModGain * nrnGModRaw
+		extraRaw = ly.Acts.Dend.ModGain * nrnGModRaw
+		if ly.Acts.Dend.ModACh.IsTrue() {
+			extraRaw *= ach
+		}
 		extraSyn = mod
 	case BLALayer:
-		extraRaw = GlbV(ctx, di, GvACh) * nrnGModRaw * ly.Acts.Dend.ModGain
-		extraSyn = GlbV(ctx, di, GvACh) * nrnGModSyn * ly.Acts.Dend.ModGain
+		extraRaw = ach * nrnGModRaw * ly.Acts.Dend.ModGain
+		extraSyn = ach * nrnGModSyn * ly.Acts.Dend.ModGain
 	default:
 		if ly.Acts.Dend.HasMod.IsTrue() {
 			mod := ly.Acts.Dend.ModBase + ly.Acts.Dend.ModGain*nrnGModSyn
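Besides hoisting the repeated GlbV(ctx, di, GvACh) lookup into ach, this hunk changes PTMaintLayer behavior: extraRaw was previously always ACh-scaled, and is now scaled only when Dend.ModACh is on, matching how mod itself is gated. A toy before/after comparison with made-up values:

func exampleExtraRawGating(modACh bool) (oldVal, newVal float32) {
	ach := float32(0.3)     // hypothetical global ACh level
	modGain := float32(1.5) // Acts.Dend.ModGain
	gModRaw := float32(0.4) // hypothetical GModRaw
	oldVal = ach * modGain * gModRaw // old code: always ACh-scaled
	newVal = modGain * gModRaw       // new code: ACh-scaled only if ModACh
	if modACh {
		newVal *= ach
	}
	return
}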
37 changes: 19 additions & 18 deletions axon/pvlv_net.go
@@ -554,7 +554,7 @@ func (net *Network) AddACCost(ctx *Context, nCosts, accY, accX int, space float3
 // Makes all appropriate interconnections and sets default parameters.
 // Needs CS -> BLA, OFC connections to be made.
 // Returns layers most likely to be used for remaining connections and positions.
-func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofcY, ofcX int, space float32) (vSgpi, vSmtxGo, vSmtxNo, vSpatchD1, vSpatchD2, urgency, usPos, pvPos, usNeg, usNegP, pvNeg, pvNegP, blaPosAcq, blaPosExt, blaNegAcq, blaNegExt, blaNov, ofcPosUS, ofcPosUSCT, ofcPosUSPTp, ilPos, ilPosCT, ilPosPTp, ilPosMD, ofcNegUS, ofcNegUSCT, ofcNegUSPTp, accCost, accCostCT, accCostPTp, ilNeg, ilNegCT, ilNegPTp, ilNegMD, sc *Layer) {
+func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofcY, ofcX int, space float32) (vSgpi, vSmtxGo, vSmtxNo, vSpatchD1, vSpatchD2, urgency, usPos, pvPos, usNeg, usNegP, pvNeg, pvNegP, blaPosAcq, blaPosExt, blaNegAcq, blaNegExt, blaNov, ofcPosUS, ofcPosUSCT, ofcPosUSPT, ofcPosUSPTp, ilPos, ilPosCT, ilPosPT, ilPosPTp, ilPosMD, ofcNegUS, ofcNegUSCT, ofcNegUSPT, ofcNegUSPTp, accCost, accCostCT, accCostPT, accCostPTp, ilNeg, ilNegCT, ilNegPT, ilNegPTp, ilNegMD, sc *Layer) {
 	nUSpos := int(net.PVLV.NPosUSs)
 	nUSneg := int(net.PVLV.NNegUSs)
 	nCosts := int(net.PVLV.NCosts)
@@ -585,10 +585,10 @@ func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofc
 	ofcNegUS, ofcNegUSCT, ofcNegUSPT, ofcNegUSPTp, ofcNegUSMD := net.AddOFCnegUS(ctx, nUSneg, ofcY, ofcX, space)
 	_ = ofcNegUSPT
 
-	ilPos, ilPosCT, ilPosPT, ilPosPTp, ilPosMD := net.AddPFC2D("ILpos", "MD", ofcY, ofcX, true, true, space)
+	ilPos, ilPosCT, ilPosPT, ilPosPTp, ilPosMD = net.AddPFC2D("ILpos", "MD", ofcY, ofcX, true, true, space)
 	_ = ilPosPT
 
-	ilNeg, ilNegCT, ilNegPT, ilNegPTp, ilNegMD := net.AddPFC2D("ILneg", "MD", ofcY, ofcX, true, true, space)
+	ilNeg, ilNegCT, ilNegPT, ilNegPTp, ilNegMD = net.AddPFC2D("ILneg", "MD", ofcY, ofcX, true, true, space)
 	_ = ilNegPT
 
 	ilPosPT.DefParams["Layer.Acts.Dend.ModACh"] = "true"
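The switch from := to = here is forced by the signature change above: ilPos, ilPosCT, ilPosPT, and friends are now named return values of AddPVLVOFCus, and Go rejects := when no variable on the left side is new. A minimal illustration (not from the codebase):

func namedReturns() (a, b int) {
	a, b = 1, 2 // assigns to the named return values
	// a, b := 1, 2 // compile error: no new variables on left side of :=
	return
}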
@@ -824,9 +824,9 @@ func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofc
 	net.ConnectToPulv(ofcPosUS, ofcPosUSCT, pvPosP, full, full, prjnClass)
 
 	// note: newly trying this
-	net.ConnectPTPredToPulv(ofcPosUSPTp, drivesP, p1to1, p1to1, prjnClass)
-	net.ConnectPTPredToPulv(ofcPosUSPTp, usPosP, p1to1, p1to1, prjnClass)
-	net.ConnectPTPredToPulv(ofcPosUSPTp, pvPosP, p1to1, p1to1, prjnClass)
+	net.ConnectPTToPulv(ofcPosUSPT, ofcPosUSPTp, drivesP, p1to1, p1to1, prjnClass)
+	net.ConnectPTToPulv(ofcPosUSPT, ofcPosUSPTp, usPosP, p1to1, p1to1, prjnClass)
+	net.ConnectPTToPulv(ofcPosUSPT, ofcPosUSPTp, pvPosP, p1to1, p1to1, prjnClass)
 
 	///////////////////////////////////////////
 	// ILpos
@@ -842,9 +842,9 @@ func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofc
 
 	// note: do *not* bidirectionally connect PTp layers -- too much sustained activity
 
-	net.ConnectToPFC(pvPos, pvPosP, ilPos, ilPosCT, ilPosPTp, full, "PVToIL")
+	net.ConnectToPFC(pvPos, pvPosP, ilPos, ilPosCT, ilPosPT, ilPosPTp, full, "PVToIL")
 
-	net.ConnectPTPredToPulv(ilPosPTp, pvPosP, full, full, prjnClass)
+	net.ConnectPTToPulv(ilPosPT, ilPosPTp, pvPosP, full, full, prjnClass)
 
 	// note: not connecting deeper CT and PT layers to vSmtxGo at this point
 	// could explore that later
@@ -862,8 +862,8 @@ func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofc
 
 	net.ConnectToPulv(ofcNegUS, ofcNegUSCT, usNegP, p1to1, p1to1, prjnClass)
 	net.ConnectToPulv(ofcNegUS, ofcNegUSCT, pvNegP, full, full, prjnClass)
-	net.ConnectPTPredToPulv(ofcNegUSPTp, usNegP, p1to1, p1to1, prjnClass)
-	net.ConnectPTPredToPulv(ofcNegUSPTp, pvNegP, full, full, prjnClass)
+	net.ConnectPTToPulv(ofcNegUSPT, ofcNegUSPTp, usNegP, p1to1, p1to1, prjnClass)
+	net.ConnectPTToPulv(ofcNegUSPT, ofcNegUSPTp, pvNegP, full, full, prjnClass)
 
 	///////////////////////////////////////////
 	// Costs
@@ -873,8 +873,8 @@ func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofc
 
 	net.ConnectToPulv(accCost, accCostCT, costP, p1to1, p1to1, prjnClass)
 	net.ConnectToPulv(accCost, accCostCT, pvNegP, full, full, prjnClass)
-	net.ConnectPTPredToPulv(accCostPTp, costP, p1to1, p1to1, prjnClass)
-	net.ConnectPTPredToPulv(accCostPTp, pvNegP, full, full, prjnClass)
+	net.ConnectPTToPulv(accCostPT, accCostPTp, costP, p1to1, p1to1, prjnClass)
+	net.ConnectPTToPulv(accCostPT, accCostPTp, pvNegP, full, full, prjnClass)
 
 	///////////////////////////////////////////
 	// ILneg
@@ -897,8 +897,8 @@ func (net *Network) AddPVLVOFCus(ctx *Context, nYneur, popY, popX, bgY, bgX, ofc
 
 	// note: do *not* bidirectionally connect PTp layers -- too much sustained activity
 
-	net.ConnectToPFC(pvNeg, pvNegP, ilNeg, ilNegCT, ilNegPTp, full, "PVToIL")
-	net.ConnectPTPredToPulv(ilNegPTp, pvNegP, full, full, prjnClass)
+	net.ConnectToPFC(pvNeg, pvNegP, ilNeg, ilNegCT, ilNegPT, ilNegPTp, full, "PVToIL")
+	net.ConnectPTToPulv(ilNegPT, ilNegPTp, pvNegP, full, full, prjnClass)
 
 	// note: not connecting deeper CT and PT layers to vSmtxGo at this point
 	// could explore that later
@@ -934,10 +934,11 @@ func (net *Network) AddBOA(ctx *Context, nYneur, popY, popX, bgY, bgX, pfcY, pfc
 	full := prjn.NewFull()
 	var pj *Prjn
 
-	vSgpi, vSmtxGo, vSmtxNo, vSpatchD1, vSpatchD2, urgency, usPos, pvPos, usNeg, usNegP, pvNeg, pvNegP, blaPosAcq, blaPosExt, blaNegAcq, blaNegExt, blaNov, ofcPosUS, ofcPosUSCT, ofcPosUSPTp, ilPos, ilPosCT, ilPosPTp, ilPosMD, ofcNegUS, ofcNegUSCT, ofcNegUSPTp, accCost, accCostCT, accCostPTp, ilNeg, ilNegCT, ilNegPTp, ilNegMD, sc := net.AddPVLVOFCus(ctx, nYneur, popY, popX, bgY, bgX, pfcY, pfcX, space)
+	vSgpi, vSmtxGo, vSmtxNo, vSpatchD1, vSpatchD2, urgency, usPos, pvPos, usNeg, usNegP, pvNeg, pvNegP, blaPosAcq, blaPosExt, blaNegAcq, blaNegExt, blaNov, ofcPosUS, ofcPosUSCT, ofcPosUSPT, ofcPosUSPTp, ilPos, ilPosCT, ilPosPT, ilPosPTp, ilPosMD, ofcNegUS, ofcNegUSCT, ofcNegUSPT, ofcNegUSPTp, accCost, accCostCT, accCostPT, accCostPTp, ilNeg, ilNegCT, ilNegPT, ilNegPTp, ilNegMD, sc := net.AddPVLVOFCus(ctx, nYneur, popY, popX, bgY, bgX, pfcY, pfcX, space)
 	_, _, _, _, _, _, _ = usPos, usNeg, usNegP, pvNeg, pvNegP, ilPosCT, ilNegMD
 	_, _, _ = accCost, accCostCT, accCostPTp
 	_, _ = blaNegAcq, blaNegExt
+	_, _, _, _, _ = ofcPosUSPT, ofcNegUSPT, ilPosPT, ilNegPT, accCostPT
 
 	// ILposP is what PLutil predicts, in order to learn about value (reward)
 	ilPosP := net.AddPulvForSuper(ilPos, space)
@@ -971,7 +972,7 @@ func (net *Network) AddBOA(ctx *Context, nYneur, popY, popX, bgY, bgX, pfcY, pfc
 
 	// net.ConnectCTSelf(ilNegCT, full) // todo: test
 	// todo: ofcNeg
-	// net.ConnectToPFC(effort, effortP, ilNeg, ilNegCT, ilNegPTp, full)
+	// net.ConnectToPFC(effort, effortP, ilNeg, ilNegCT, ilNegPT, ilNegPTp, full)
 	// note: can provide input from *other* relevant inputs not otherwise being predicted
 	// net.ConnectLayers(dist, ilNegPTPred, full, ForwardPrjn).SetClass("ToPTPred")
 
@@ -981,11 +982,11 @@ func (net *Network) AddBOA(ctx *Context, nYneur, popY, popX, bgY, bgX, pfcY, pfc
 	// net.ConnectCTSelf(plUtilCT, full) // todo: test
 
 	// util predicts OFCval and ILneg
-	pj, _ = net.ConnectToPFCBidir(ilPos, ilPosP, plUtil, plUtilCT, plUtilPTp, full, "ILToPL")
+	pj, _ = net.ConnectToPFCBidir(ilPos, ilPosP, plUtil, plUtilCT, plUtilPT, plUtilPTp, full, "ILToPL")
 	pj.DefParams = params.Params{
 		"Prjn.PrjnScale.Abs": "1", // not good to make this stronger actually
 	}
-	pj, _ = net.ConnectToPFCBidir(ilNeg, ilNegP, plUtil, plUtilCT, plUtilPTp, full, "ILToPL")
+	pj, _ = net.ConnectToPFCBidir(ilNeg, ilNegP, plUtil, plUtilCT, plUtilPT, plUtilPTp, full, "ILToPL")
 	pj.DefParams = params.Params{
 		"Prjn.PrjnScale.Abs": "3", // drive acc stronger -- only this one works well
 	}
18 changes: 12 additions & 6 deletions examples/pfcmaint/params.go
@@ -33,14 +33,16 @@ var ParamSets = netparams.Sets{
 		{Sel: ".PTMaintLayer", Desc: "time integration params",
 			Params: params.Params{
 				"Layer.Acts.Dend.ModGain": "1.5",
-				"Layer.Acts.GabaB.Gbar":   "0.015", // too strong and it depresses firing for a long time
+				"Layer.Acts.GabaB.Gbar":   "0.01", // too strong and it depresses firing for a long time
 				"Layer.Acts.SMaint.On":    "true",
-				"Layer.Acts.SMaint.NNeurons": "10",
-				"Layer.Acts.SMaint.ISI.Min":  "3", // too high and fails to take
+				"Layer.Acts.SMaint.NNeurons": "10", // higher = more activity
+				"Layer.Acts.SMaint.ISI.Min":  "1", // 1 sig better than 3
 				"Layer.Acts.SMaint.ISI.Max":  "20", // not much effect
-				"Layer.Acts.SMaint.Gbar":  "1", // 0.5 = slower to overcome gabab
-				"Layer.Inhib.Layer.Gi":    "2",
-				"Layer.Inhib.Pool.Gi":     "2", // not active
+				"Layer.Acts.SMaint.Gbar":  "0.2",
+				"Layer.Acts.SMaint.Inhib": "1",
+				"Layer.Inhib.ActAvg.Nominal": "0.1",
+				"Layer.Inhib.Layer.Gi":    "0.5",
+				"Layer.Inhib.Pool.Gi":     "0.5", // not active
 			}},
 		{Sel: ".BGThalLayer", Desc: "",
 			Params: params.Params{
@@ -63,6 +65,10 @@ var ParamSets = netparams.Sets{
 			Params: params.Params{
 				"Prjn.PrjnScale.Abs": "4.0", // 4 > 2 for gating sooner
 			}},
+		{Sel: "#PFCPTpToItemP", Desc: "weaker",
+			Params: params.Params{
+				"Prjn.PrjnScale.Abs": "1",
+			}},
 	},
 }
 
