changing weights in a network during learning

Moderator: wwlytton

Post Reply
vcut

changing weights in a network during learning

Post by vcut »

Dear all,

I have constructed a network of 5 biophysical cells, each one of which is activated by a different NetStim. I included also three synapses on each of the cells (two excitatory and one inhibitory). Is it possible to change the weights of S0 -> M1.E2 (see below) at each time step?

Here is the source code of the network:

Code: Select all

// Artificial cells no longer need a default section.
//Network cell templates
//   M_Cell
//Artificial cells
//   S_NetStim

// Network size parameters: ns artificial spike sources, mc biophysical cells.
ns = 5
mc = 5

// Object references for the mc M_Cell instances (filled in after the
// template definition, in the network instantiation section below).
objref MC[mc]
// Template for a simple three-section biophysical cell (soma, dend, axon)
// with HH channels everywhere and three synapses, in the style of code
// exported by the NEURON Network Builder / Cell Builder.
begintemplate M_Cell
public is_art
public init, topol, basic_shape, subsets, geom, biophys, geom_nseg, biophys_inhomo
public synlist, connect2target

public soma, dend, axon
public all

objref synlist

// Constructor: build topology, geometry and biophysics, then attach synapses.
proc init() {
  topol()
  subsets()
  geom()
  biophys()
  geom_nseg()
  synlist = new List()
  synapses()
}

create soma, dend, axon

// Attach dend to the 1 end of soma and axon to the 0 end.
proc topol() { local i
  connect dend(0), soma(1)
  connect axon(0), soma(0)
  basic_shape()
}
// 3-D points are for display only; functional L and diam are set in geom().
proc basic_shape() {
  soma {pt3dclear() pt3dadd(0, 0, 0, 1) pt3dadd(15, 0, 0, 1)}
  dend {pt3dclear() pt3dadd(15, 0, 0, 1) pt3dadd(75, 0, 0, 1)}
  axon {pt3dclear() pt3dadd(0, 0, 0, 1) pt3dadd(-59, 0, 0, 1)}
}

objref all
// Gather every section of this cell into the SectionList "all".
proc subsets() { local i
  objref all
  all = new SectionList()
    soma all.append()
    dend all.append()
    axon all.append()

}
// All three sections are 100 um long cylinders of 1 um diameter.
proc geom() {
  forsec all {  }
  soma {  L = 100  diam = 1  }
  dend {  L = 100  diam = 1  }
  axon {  L = 100  diam = 1  }
}
external lambda_f
// d_lambda rule: odd nseg sized from the AC length constant at 100 Hz.
proc geom_nseg() {
  forsec all { nseg = int((L/(0.1*lambda_f(100))+.9)/2)*2 + 1  }
}
// Uniform passive properties plus standard Hodgkin-Huxley channels
// (squid-axon defaults) in every section.
proc biophys() {
  forsec all {
    Ra = 35.4
    cm = 1
  }
  soma {
    insert hh
      gnabar_hh = 0.12
      gkbar_hh = 0.036
      gl_hh = 0.0003
      el_hh = -54.3
  }
  dend {
    insert hh
      gnabar_hh = 0.12
      gkbar_hh = 0.036
      gl_hh = 0.0003
      el_hh = -54.3
  }
  axon {
    insert hh
      gnabar_hh = 0.12
      gkbar_hh = 0.036
      gl_hh = 0.0003
      el_hh = -54.3
  }
}
proc biophys_inhomo(){}
// Spike output: NetCon watches soma.v at the 1 end (where dend attaches).
proc connect2target() { //$o1 target point process, $o2 returned NetCon
  soma $o2 = new NetCon(&v(1), $o1)
}
objref syn_
// Synapses appended to synlist in a fixed, index-significant order:
//   0 = E0 excitatory ExpSyn  on soma(0.4), tau 10 ms
//   1 = I1 inhibitory Exp2Syn on dend(0.4), tau1 0.5 / tau2 1 ms
//   2 = E2 excitatory ExpSyn  on dend(0.8), tau 10 ms
proc synapses() {
  /* E0 */   soma syn_ = new ExpSyn(0.4)  synlist.append(syn_)
    syn_.tau = 10
  /* I1 */   dend syn_ = new Exp2Syn(0.4)  synlist.append(syn_)
    syn_.tau1 = 0.5
    syn_.tau2 = 1
  /* E2 */   dend syn_ = new ExpSyn(0.8)  synlist.append(syn_)
    syn_.tau = 10
}
func is_art() { return 0 }  // real (biophysical) cell, not artificial

endtemplate M_Cell

objref NS[ns]  // object references for the ns S_NetStim instances
// Thin wrapper so a NetStim can be treated like a cell by the
// cell_append()/nc_append() machinery below.
begintemplate S_NetStim
public pp, connect2target, x, y, z, position, is_art
objref pp
proc init() {
  pp = new NetStim()
}
func is_art() { return 1 }  // artificial cell (no sections)
// $o1 target point process, $o2 returned NetCon; pp is the spike source.
proc connect2target() { $o2 = new NetCon(pp, $o1) }
endtemplate S_NetStim

//Network specification interface
// Instantiate the artificial spike sources and the biophysical cells.
for i=0, ns-1 {NS[i] = new S_NetStim(i)}
for i=0, mc-1 {MC[i] = new M_Cell(i)}


// cells: master list of all cells; nclist: every NetCon created;
// netcon: scratch objref reused by nc_append().
objref cells, nclist, netcon
{cells = new List()  nclist = new List()}

// Append cell $o1 to the master list and return its index within it.
func cell_append() {cells.append($o1)
	return cells.count - 1
}

// Connect source cell $1 to target cell $2 with a new NetCon.
//   $3 >= 0 : target is synlist.object($3) on a biophysical target cell
//   $3 <  0 : target is the target cell's point process pp (artificial cell)
//   $4 = weight, $5 = delay (ms)
// The NetCon is stored in nclist; returns its index in nclist.
func nc_append() {//srcindex, tarcelindex, synindex
  if ($3 >= 0) {
    cells.object($1).connect2target(cells.object($2).synlist.object($3),netcon)
    netcon.weight = $4   netcon.delay = $5
  }else{
    cells.object($1).connect2target(cells.object($2).pp,netcon)
    netcon.weight = $4   netcon.delay = $5
  }
  nclist.append(netcon)
  return nclist.count - 1
}

//Network instantiation

// cells indices: 0..ns-1 are the NetStims (S0), ns..ns+mc-1 are the M_Cells (M1).
/* S0 */  for i = 0, ns-1 {cell_append(NS[i])}
/* M1 */  for i = 0, mc-1 {cell_append(MC[i])}
 
objref r, a
// wts holds the (random) initial weights for the S0 -> M1.E2 connections.
double wts[ns][mc]
// Build the S0 -> M1 connections, giving each E2 synapse a random weight.
// NOTE(review): because i is incremented inside the j loop, only the diagonal
// pairs NS[k] -> MC[k] are connected (one-to-one wiring), and only wts[k][k]
// is assigned, using just the first mc entries of a (which has ns*mc entries).
// If all ns*mc source/target pairs were intended, nested loops over i and j
// are needed -- confirm the intent.
proc initNet() {

	r = new Random()
	r.uniform(0,0.02)
	a = new Vector(ns*mc)
	a.setrand(r)
	
	i = 0
	for j = 0, mc-1 {
		wts[i][j] = a.x[j]
 		/* S0 -> M1.E0 */  nc_append(i,   ns+j, 0,  0.02,0.5)
  		/* S0 -> M1.E2 */  nc_append(i,   ns+j, 2,  wts[i][j],0)
  		i = i + 1
	}
}

/////// custom run() and init() /////////////
// Custom run(): build the network, then initialize and simulate to tstop.
// BUG FIX: the original called initNet() AFTER continuerun(tstop), so the
// NetCons (and their weights) were created only once the simulation had
// already finished and therefore never carried any events.  Connections must
// exist before initialization so that NetStim events queued at finitialize()
// are delivered during the run.  The nclist guard prevents duplicate
// connections when run() is invoked more than once from the GUI.
proc run() {
	if (nclist.count == 0) {
		initNet()
	}
	stdinit()
	continuerun(tstop)
}

// Custom initialization (stands in for the standard run system's init()).
// NOTE(review): executing load_file("nrngui.hoc") early would load the
// standard run system, making this proc redundant and also supplying a
// proc advance() hook for per-time-step weight updates.
proc init() {
	finitialize(v_init)
	if (cvode.active()) {
		cvode.re_init()  // adaptive integrator: re-init after finitialize
	} else {
		fcurrent()       // fixed dt: make currents consistent with states
	}
	frecord_init()           // restart any Vector recordings
}	


///////// user interface ////////
// Hand-built clone of the standard RunControl panel; the buttons call the
// custom run()/init() defined above plus stdrun helpers.
{
xpanel("RunControl", 0)
v_init = -65
xvalue("Init","v_init", 1,"stdinit()", 1, 1 )
xbutton("Init & Run","run()")
xbutton("Stop","stoprun=1")
runStopAt = 5
xvalue("Continue til","runStopAt", 1,"{continuerun(runStopAt) stoprun=1}", 1, 1 )
runStopIn = 1
xvalue("Continue for","runStopIn", 1,"{continuerun(t + runStopIn) stoprun=1}", 1, 1 )
xbutton("Single Step","steprun()")
t = 0
xvalue("t","t", 2 )
tstop = 350
xvalue("Tstop","tstop", 1,"tstop_changed()", 0, 1 )
dt = 0.025
xvalue("dt","dt", 1,"setdt()", 0, 1 )
steps_per_ms = 40
xvalue("Points plotted/ms","steps_per_ms", 1,"setdt()", 0, 1 )
screen_update_invl = 0.05
xvalue("Scrn update invl","screen_update_invl", 1,"", 0, 1 )
realtime = 0
xvalue("Real Time","realtime", 0,"", 0, 1 )
xpanel(158,158)
}
ted
Site Admin
Posts: 6299
Joined: Wed May 18, 2005 4:50 pm
Location: Yale University School of Medicine
Contact:

Post by ted »

Without reading through all your code (which I wrapped inside the Forum's markup
for code so that formatting isn't lost--see <b>Preserving code and text formatting</b>
https://www.neuron.yale.edu/phpBB2/viewtopic.php?t=493).

If you know in advance what values to use, try the Vector class's play() method--see
http://www.neuron.yale.edu/neuron/stati ... .html#play

If the new values are to be calculated in the course of the simulation, the easiest way is
to use a custom proc advance(). See <b>7.2.1.2 advance()</b> on page 161 of The
NEURON Book, or let me know if you need more information, but the basic idea is

Code: Select all

proc advance() {
  // anything to be done before fadvance is called here
  fadvance()
  // anything to be done after fadvance is called here
}
This will work with fixed dt. If you plan to use the adaptive integrator, you'll have to
reinitialize CVode after each weight change. More convenient would be to use a
synaptic mechanism specified in NMODL that has a built-in learning rule for adjusting
weight. Implementational details depend entirely on what rule you have in mind.


A minor comment--it is usually a good idea to execute
load_file("nrngui.hoc")
or at least
load_file("noload.hoc")
(which doesn't bring up the NEURONMainMenu toolbar)
early or even as the very first executed statement. This ensures that the standard
run system is loaded. There are times when the standard run system is not needed,
but those are rare exceptions. In this particular case, it would eliminate the need to
provide your own init(), and ensure the existence of a proc advance() and a call chain
that invokes advance() in the course of simulation.
vcut

Post by vcut »

Actually I am planning to use a StdwaSA.mod file that I found in ModelDB (which I post below). I've read example 10.1.1 (Graded synaptic transmission), which gives some hints on how to use the pointer capre and link it to another variable at some location (see below).

Code: Select all

objref syn
// Example (from NEURON Book 10.1.1): attach a GradSyn and link its POINTER
// capre to presynaptic [Ca]i.  "somedenrite", "precell" and "bouton" are
// placeholder names standing in for sections/objects defined elsewhere.
somedenrite syn = new GradSyn(0.8)
setpointer syn.capre, precell.bouton.cai(0.5) 
In my case though (I think), the pointer wsyn in the StdwaSA.mod file isn't meant to be linked to a section, but to change the weight of S0 -> M1.E2 (see previous posting) at each time step. How can I call the pointer wsyn from within the nc_append() function? Maybe I am thinking about it the wrong way?


Code: Select all

COMMENT
Spike Timing Dependent Weight Adjuster
based on Song and Abbott, 2001.
Andrew Davison, UNIC, CNRS, 2003-2004
ENDCOMMENT

NEURON {
	POINT_PROCESS StdwaSA
	RANGE interval, tlast_pre, tlast_post, M, P
	RANGE deltaw, wmax, aLTP, aLTD
	GLOBAL tauLTP, tauLTD, on
	POINTER wsyn		: set with setpointer to the weight to be adjusted
}

ASSIGNED {
	interval	(ms)	: since last spike of the other kind
	tlast_pre	(ms)	: time of last presynaptic spike
	tlast_post	(ms)	: time of last postsynaptic spike
	M			: LTD function
	P			: LTP function
	deltaw			: change in weight
	wsyn			: weight of the synapse
}

INITIAL {
	interval = 0
	tlast_pre = 0
	tlast_post = 0
	M = 0
	P = 0
	deltaw = 0
}

PARAMETER {
	tauLTP  = 20	(ms)    : decay time for LTP part ( values from           )
	tauLTD  = 20	(ms)    : decay time for LTD part ( Song and Abbott, 2001 )
	wmax    = 1		: min and max values of synaptic weight
	aLTP    = 0.001		: amplitude of LTP steps
	aLTD    = 0.00106	: amplitude of LTD steps
	on	= 1		: allows learning to be turned on and off globally
}

: Events with weight >= 0 signal presynaptic spikes; events with weight < 0
: signal postsynaptic spikes.  P is the (decaying) potentiation trace updated
: by pre spikes; M is the (decaying, negative) depression trace updated by
: post spikes.  A pre spike applies depression (post-before-pre), a post
: spike applies potentiation (pre-before-post).
NET_RECEIVE (w) {
	if (w >= 0) {				: this is a pre-synaptic spike
		: decay P to the present time, then step it up
		P = P*exp((tlast_pre-t)/tauLTP) + aLTP
		interval = tlast_post - t	: interval is negative
		tlast_pre = t
		: depression: M <= 0, so deltaw <= 0
		deltaw = wmax * M * exp(interval/tauLTD)
	} else {				: this is a post-synaptic spike
		: decay M to the present time, then step it down
		M = M*exp((tlast_post-t)/tauLTD) - aLTD
		interval = t - tlast_pre	: interval is positive
		tlast_post = t
		: potentiation: P >= 0, so deltaw >= 0
		deltaw = wmax * P * exp(-interval/tauLTP)
	}
	if (on) {
		: apply the change and clip the weight to [0, wmax]
		wsyn = wsyn + deltaw
		if (wsyn > wmax) {
			wsyn = wmax
		}
		if (wsyn < 0) {
			wsyn = 0
		}
	}
}
ted
Site Admin
Posts: 6299
Joined: Wed May 18, 2005 4:50 pm
Location: Yale University School of Medicine
Contact:

Post by ted »

You're trying to fit a round peg into a square hole. When Andrew Davison developed the
code he used for this paper
Davison AP, Fregnac Y (2006) Learning Cross-Modal Spatial Transformations through Spike Timing-Dependent Plasticity J Neurosci 26:5604-5615
http://senselab.med.yale.edu/senselab/m ... odel=64261
he used some very clever and elaborate strategies to set up connections between cells
and make them manifest spike timing dependent plasticity. Unfortunately, these
strategies are incompatible with the Network Builder's strategy for connecting cells. If
you want to use his mod files, you can't use the Network Builder to construct your network--
you'll have to do it in hoc, the way he did.
Post Reply