From e5aa9e8095ffccc68e1c027599f8228a67174008 Mon Sep 17 00:00:00 2001 From: Sandro Date: Wed, 26 Jul 2023 12:23:06 +0200 Subject: [PATCH 01/34] Exlude tests from discovery / installation Without it tests/ will be installed as a top level Python module. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9e56161d8..0eead40d8 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,7 @@ keywords=["neuron", "network", "developing", "framework", "biological", "simulation"], # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). - packages=find_packages(exclude=["saveLoadV1"]), + packages=find_packages(exclude=["saveLoadV1", "tests*"]), # List run-time dependencies here. These will be installed by pip when # your project is installed. For an analysis of "install_requires" vs pip's # requirements files see: From 2507033226fe449082b0863935c9ea2d23a3c7ce Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Tue, 20 Feb 2024 16:07:33 +0000 Subject: [PATCH 02/34] ci: update setup-python action To fix this annoying note: "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: actions/setup-python@v4.." 
--- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c07c86dfb..809779425 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -16,7 +16,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} From a79c85b37303afe932cd33a2f34b2f004bfa9785 Mon Sep 17 00:00:00 2001 From: SamN Date: Fri, 1 Mar 2024 10:57:16 -0500 Subject: [PATCH 03/34] add default padding (==1) for plotCSD, to avoid top/bottom edge artifacts in color plots --- netpyne/plotting/plotCSD.py | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/netpyne/plotting/plotCSD.py b/netpyne/plotting/plotCSD.py index fd9fbb5cf..3c8e20991 100644 --- a/netpyne/plotting/plotCSD.py +++ b/netpyne/plotting/plotCSD.py @@ -9,6 +9,14 @@ import numpy as np import scipy +def getPaddedCSD (CSDData, pad): + # pad the first/last row of CSDData by replication (to avoid edge artifacts when drawing colors plots) + npcsd = [] + for i in range(pad): npcsd.append(CSDData[0,:]) + for i in range(CSDData.shape[0]): npcsd.append(CSDData[i,:]) + for i in range(pad): npcsd.append(CSDData[-1,:]) + npcsd=np.array(npcsd) + return npcsd @exception def plotCSD( @@ -31,6 +39,7 @@ def plotCSD( showFig=False, smooth=True, colorbar=True, + pad=1, **kwargs ): """ @@ -113,9 +122,13 @@ def plotCSD( **Default:** ``True`` colorbar : bool - Whetehr or not to plot the colorbar + Whether or not to plot the colorbar **Default:** ``True`` + pad : int + Amount to pad CSDData on top/bottom for more accurate interpolation at edges + **Default:** ``1`` + """ # If there is no input data, get the data from the NetPyNE sim object @@ -137,6 +150,9 @@ def plotCSD( else: pass # TODO: ensure time slicing works properly in case CSDData is passed as an 
argument + npcsd = CSDData + if pad > 0: npcsd = getPaddedCSD(CSDData, pad) # apply padding (replicate first,last rows) + if timeRange is None: timeRange = [0, sim.cfg.duration] @@ -145,12 +161,13 @@ def plotCSD( # PLOTTING X = np.arange(timeRange[0], timeRange[1], dt) # X == tt - Y = np.arange(CSDData.shape[0]) + Y = np.arange(npcsd.shape[0]) # interpolation - CSD_spline = scipy.interpolate.RectBivariateSpline(Y, X, CSDData) - Y_plot = np.linspace(0, CSDData.shape[0], num=1000) - Z = CSD_spline(Y_plot, X) + fctr = int(1000 / CSDData.shape[0]) + CSD_spline = scipy.interpolate.RectBivariateSpline(Y, X, npcsd) + Y_plot = np.linspace(-pad, npcsd.shape[0] + pad, num=int(1000*npcsd.shape[0]/CSDData.shape[0])) + Z = CSD_spline(Y_plot, X)[pad*fctr:pad*fctr+1000,:] # plotting options plt.rcParams.update({'font.size': fontSize}) @@ -158,7 +175,7 @@ def plotCSD( xmax = int(X[-1]) + 1 ymin = 0 if ymax is None: - ymax = sim.cfg.recordLFP[-1][1] + spacing_um + ymax = sim.cfg.recordLFP[-1][1] + spacing_um + pad extent_xy = [xmin, xmax, ymax, ymin] # set up figure @@ -229,7 +246,7 @@ def plotCSD( subaxs[chan].margins(0.0, 0.01) subaxs[chan].get_xaxis().set_visible(False) subaxs[chan].get_yaxis().set_visible(False) - subaxs[chan].plot(X, CSDData[chan, :], color='green', linewidth=0.3, label='CSD time series') + subaxs[chan].plot(X, npcsd[pad+chan, :], color='green', linewidth=0.3, label='CSD time series') if legendLabel: subaxs[chan].legend(loc='upper right', fontsize=fontSize) legendLabel = False From a7c06ec8f760d8ea136ef950d2d66e1f024df394 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Tue, 5 Mar 2024 11:20:52 +0100 Subject: [PATCH 04/34] better handling of exceptions in `importCellParams()` (incl. 
issue 782) --- CHANGES.md | 8 ++++++++ netpyne/conversion/neuronPyHoc.py | 8 ++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 1122ddf2d..d798fdc2a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,11 @@ +# Version in development + +**New features** + +**Bug fixes** + +- Better handling of exceptions in `importCellParams()` (incl. issue 782) + # Version 1.0.6 **New features** diff --git a/netpyne/conversion/neuronPyHoc.py b/netpyne/conversion/neuronPyHoc.py index 105b85b77..737c5f3ac 100644 --- a/netpyne/conversion/neuronPyHoc.py +++ b/netpyne/conversion/neuronPyHoc.py @@ -286,7 +286,9 @@ def importCell(fileName, cellName, cellArgs=None, cellInstance=False): cellArgs = [] # Define as empty list if not otherwise defined if fileName.endswith('.hoc') or fileName.endswith('.tem'): - h.load_file(fileName) + resultCode = h.load_file(fileName) + if resultCode == 0: # error + raise Exception(f"Error occured in h.load_file() when loading {fileName}. 
See above for details.") if not cellInstance: if isinstance(cellArgs, dict): cell = getattr(h, cellName)(**cellArgs) # create cell using template, passing dict with args @@ -318,9 +320,7 @@ def importCell(fileName, cellName, cellArgs=None, cellInstance=False): cell = load(fileName) else: - print("File name should end in '.hoc', '.py', or '.swc'") - return - + raise Exception("File name should end in '.hoc', '.py', or '.swc'") secDic, secListDic, synMechs, globs = getCellParams(cell, varList, origGlob) if fileName.endswith('.py'): From 8d1f4b84a106737d76beed01ca04cc3aef5a1d73 Mon Sep 17 00:00:00 2001 From: "Ankur Sinha (Ankur Sinha Gmail)" Date: Sun, 17 Mar 2024 21:41:45 +0000 Subject: [PATCH 05/34] fix(py312): remove `imp` The `imp` module has been removed in Python 3.12: https://docs.python.org/3/whatsnew/3.12.html#imp --- examples/netClamp/src/cfg.py | 3 +-- examples/rxd_buffering/src/netParams.py | 3 +-- netpyne/batch/grid.py | 1 - 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/examples/netClamp/src/cfg.py b/examples/netClamp/src/cfg.py index 88d0c68db..dfac67e86 100644 --- a/examples/netClamp/src/cfg.py +++ b/examples/netClamp/src/cfg.py @@ -1,4 +1,3 @@ -import imp from netpyne import specs # Simulation options @@ -14,4 +13,4 @@ cfg.recordStim = True # required for netClamp to work cfg.analysis['plotRaster'] = {'orderBy': 'y', 'orderInverse': True} # Plot a raster -cfg.analysis['plotTraces'] = {'include': [5]} # Plot recorded traces for this list of cells \ No newline at end of file +cfg.analysis['plotTraces'] = {'include': [5]} # Plot recorded traces for this list of cells diff --git a/examples/rxd_buffering/src/netParams.py b/examples/rxd_buffering/src/netParams.py index 06a2014b2..22190e495 100644 --- a/examples/rxd_buffering/src/netParams.py +++ b/examples/rxd_buffering/src/netParams.py @@ -1,4 +1,3 @@ -import imp from netpyne import specs netParams = specs.NetParams() # object of class NetParams to store the network parameters @@ -27,4 +26,4 
@@ netParams.rxdParams['reactions'] = {'buffering': {'reactant': '2 * ca + buf', 'product': 'cabuf', 'rate_f': kf, 'rate_b': kb}} ### rates -netParams.rxdParams['rates'] = {'degradation': {'species': 'buf', 'rate': '-1e-3 * buf'}} \ No newline at end of file +netParams.rxdParams['rates'] = {'degradation': {'species': 'buf', 'rate': '-1e-3 * buf'}} diff --git a/netpyne/batch/grid.py b/netpyne/batch/grid.py index adec7a77f..9d214142c 100644 --- a/netpyne/batch/grid.py +++ b/netpyne/batch/grid.py @@ -24,7 +24,6 @@ to_unicode = str import pandas as pd -import imp import os, sys import glob from time import sleep From b35b841a2c516d25f3ca4c359a730bd1b14067d2 Mon Sep 17 00:00:00 2001 From: Roman Baravalle <44242643+RomanB22@users.noreply.github.com> Date: Wed, 27 Mar 2024 09:31:15 -0500 Subject: [PATCH 06/34] Pandas deprecated parameter fix The inplace=True keyword was removed from newer Pandas versions, thus the inplace option is replaced by re-assigning the variable, following Pandas recs (https://github.com/pandas-dev/pandas/issues/57104) Changing df['popInd'].cat.set_categories(sim.net.pops.keys(), inplace=True) by df['popInd'] = df['popInd'].cat.set_categories(sim.net.pops.keys()) in analysis/spikes.py --- netpyne/analysis/spikes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/netpyne/analysis/spikes.py b/netpyne/analysis/spikes.py index 98fc65297..c42f666e4 100644 --- a/netpyne/analysis/spikes.py +++ b/netpyne/analysis/spikes.py @@ -83,7 +83,7 @@ def prepareSpikeData( orderBy = 'gid' elif orderBy == 'pop': df['popInd'] = df['pop'].astype('category') - df['popInd'].cat.set_categories(sim.net.pops.keys(), inplace=True) + df['popInd'] = df['popInd'].cat.set_categories(sim.net.pops.keys()) orderBy = 'popInd' elif isinstance(orderBy, basestring) and not isinstance(cells[0]['tags'][orderBy], Number): orderBy = 'gid' @@ -91,7 +91,7 @@ def prepareSpikeData( if isinstance(orderBy, list): if 'pop' in orderBy: df['popInd'] = 
df['pop'].astype('category') - df['popInd'].cat.set_categories(sim.net.pops.keys(), inplace=True) + df['popInd'] = df['popInd'].cat.set_categories(sim.net.pops.keys()) orderBy[orderBy.index('pop')] = 'popInd' keep = keep + list(set(orderBy) - set(keep)) elif orderBy not in keep: From c8b654dbb967f62935c1febcee6c1b9a95a38b8d Mon Sep 17 00:00:00 2001 From: vvbragin Date: Thu, 28 Mar 2024 15:02:40 +0100 Subject: [PATCH 07/34] fixed pointer id overflow on MPI (e.g. for gap junctions) --- CHANGES.md | 2 ++ netpyne/cell/compartCell.py | 9 ++++----- netpyne/network/network.py | 8 +++++++- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index d798fdc2a..850f7d219 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -6,6 +6,8 @@ - Better handling of exceptions in `importCellParams()` (incl. issue 782) +- Fixed pointer id overflow on MPI (e.g. for gap junctions) + # Version 1.0.6 **New features** diff --git a/netpyne/cell/compartCell.py b/netpyne/cell/compartCell.py index d0f1c4646..f15eeb853 100644 --- a/netpyne/cell/compartCell.py +++ b/netpyne/cell/compartCell.py @@ -1089,12 +1089,11 @@ def __parsePointerParams(self, params): def __generatePointerIds(self, pointerParams, params): from .. 
import sim - # see comments in `__parsePointerParams()` for more details - if hasattr(sim, 'rank'): - preToPostId = 1e9 * sim.rank + sim.net.lastPointerId # global index for presyn gap junc - else: - preToPostId = sim.net.lastPointerId + + if sim.net.lastPointerId > sim.net.maxPointerIdPerNode: + print(f"WARNING: potential overflow of pointer connection id!") + preToPostId = sim.net.lastPointerId sim.net.lastPointerId += 1 # keep track of num of gap juncs in this node if pointerParams['bidirectional']: diff --git a/netpyne/network/network.py b/netpyne/network/network.py index ac1a34a04..3a909364a 100644 --- a/netpyne/network/network.py +++ b/netpyne/network/network.py @@ -26,6 +26,8 @@ class Network(object): # initialize variables # ----------------------------------------------------------------------------- def __init__(self, params=None): + from .. import sim + self.params = params # params that can be expressed using string-based functions in connections @@ -62,7 +64,11 @@ def __init__(self, params=None): {} ) # Empty dict for storing GID -> local index (key = gid; value = local id) -- ~x6 faster than .index() self.lastGid = 0 # keep track of last cell gid - self.lastPointerId = 0 # keep track of last gap junction gid + + # keep track of last gap junction gid + intMax = 2**(32-1) # pointer connection id in NEURON is signed 32-bit int + self.maxPointerIdPerNode = int(intMax / sim.nhosts) + self.lastPointerId = sim.rank * self.maxPointerIdPerNode # to avoid overlap of gids from different nodes # ----------------------------------------------------------------------------- # Set network params From 9e34541190655666f746da6c65106d16047ae42d Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 5 Apr 2024 15:49:40 +0200 Subject: [PATCH 08/34] preSec and preLoc are no longer lost for inverse pointer connection --- CHANGES.md | 2 ++ netpyne/cell/compartCell.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 850f7d219..536b2cb54 100644 
--- a/CHANGES.md +++ b/CHANGES.md @@ -8,6 +8,8 @@ - Fixed pointer id overflow on MPI (e.g. for gap junctions) +- preSec and preLoc are no longer lost for inverse pointer connection + # Version 1.0.6 **New features** diff --git a/netpyne/cell/compartCell.py b/netpyne/cell/compartCell.py index f15eeb853..4c123b94a 100644 --- a/netpyne/cell/compartCell.py +++ b/netpyne/cell/compartCell.py @@ -1114,6 +1114,8 @@ def __generatePointerIds(self, pointerParams, params): 'preGid': self.gid, 'sec': params.get('preSec', 'soma'), 'loc': params.get('preLoc', 0.5), + 'preSec': params.get('sec', 'soma'), + 'preLoc': params.get('loc', 0.5), 'weight': params.get('weight', 0.0), 'synMech': params['synMech'], '__preCellSidePointerParams__': preCellSideParams, From f4d5e1cfc5f5e71547574d773b91abc26a987ab2 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 5 Apr 2024 15:56:47 +0200 Subject: [PATCH 09/34] cfg.connRandomSecFromList and cfg.distributeSynsUniformly can now be overriden in individual conn rule --- CHANGES.md | 2 ++ netpyne/cell/compartCell.py | 22 ++++++++++------------ netpyne/network/conn.py | 5 +++++ 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 536b2cb54..470d78e41 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,6 +2,8 @@ **New features** +- cfg.connRandomSecFromList and cfg.distributeSynsUniformly can now be overriden in individual conn rule + **Bug fixes** - Better handling of exceptions in `importCellParams()` (incl. issue 782) diff --git a/netpyne/cell/compartCell.py b/netpyne/cell/compartCell.py index 4c123b94a..a7c47408a 100644 --- a/netpyne/cell/compartCell.py +++ b/netpyne/cell/compartCell.py @@ -1479,6 +1479,9 @@ def _setConnPointP(self, params, secLabels, weightIndex): def _setConnSynMechs(self, params, secLabels): from .. 
import sim + distributeSynsUniformly = params.get('distributeSynsUniformly', sim.cfg.distributeSynsUniformly) + connRandomSecFromList = params.get('connRandomSecFromList', sim.cfg.connRandomSecFromList) + synsPerConn = params['synsPerConn'] if not params.get('synMech'): if sim.net.params.synMechParams: # if no synMech specified, but some synMech params defined @@ -1515,20 +1518,17 @@ def _setConnSynMechs(self, params, secLabels): synMechLocs = [i * (1.0 / synsPerConn) + 1.0 / synsPerConn / 2 for i in range(synsPerConn)] else: # if multiple sections, distribute syns uniformly - if sim.cfg.distributeSynsUniformly: + if distributeSynsUniformly: synMechSecs, synMechLocs = self._distributeSynsUniformly(secList=secLabels, numSyns=synsPerConn) else: - if not sim.cfg.connRandomSecFromList and synsPerConn == len( - secLabels - ): # have list of secs that matches num syns + # have list of secs that matches num syns + if not connRandomSecFromList and synsPerConn == len(secLabels): synMechSecs = secLabels if isinstance(params['loc'], list): if len(params['loc']) == synsPerConn: # list of locs matches num syns synMechLocs = params['loc'] else: # list of locs does not match num syns - print( - "Error: The length of the list of locations does not match synsPerConn (with cfg.distributeSynsUniformly = False" - ) + print("Error: The length of the list of locations does not match synsPerConn (with distributeSynsUniformly = False)") return else: # single loc synMechLocs = [params['loc']] * synsPerConn @@ -1537,7 +1537,7 @@ def _setConnSynMechs(self, params, secLabels): synMechLocs = params['loc'] if isinstance(params['loc'], list) else [params['loc']] # randomize the section to connect to and move it to beginning of list - if sim.cfg.connRandomSecFromList and len(synMechSecs) >= synsPerConn: + if connRandomSecFromList and len(synMechSecs) >= synsPerConn: if len(synMechLocs) == 1: synMechLocs = [params['loc']] * synsPerConn rand = h.Random() @@ -1554,9 +1554,7 @@ def 
_setConnSynMechs(self, params, secLabels): rand.uniform(0, 1) synMechLocs = [rand.repick() for i in range(synsPerConn)] else: - print( - "\nError: The length of the list of sections needs to be greater or equal to the synsPerConn (with cfg.connRandomSecFromList = True" - ) + print("\nError: The length of the list of sections needs to be greater or equal to the synsPerConn (with connRandomSecFromList = True)") return else: # if 1 synapse @@ -1565,7 +1563,7 @@ def _setConnSynMechs(self, params, secLabels): synMechLocs = params['loc'] if isinstance(params['loc'], list) else [params['loc']] # randomize the section to connect to and move it to beginning of list - if sim.cfg.connRandomSecFromList and len(synMechSecs) > 1: + if connRandomSecFromList and len(synMechSecs) > 1: rand = h.Random() preGid = params['preGid'] if isinstance(params['preGid'], int) else 0 rand.Random123(sim.hashStr('connSynMechsSecs'), self.gid, preGid) # initialize randomizer diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 9710c5165..7ab130786 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -923,6 +923,11 @@ def _addCellConn(self, connParam, preCellGid, postCellGid, preCellsTags={}): if 'weightIndex' in connParam: params['weightIndex'] = connParam.get('weightIndex') + if 'distributeSynsUniformly' in connParam: + params['distributeSynsUniformly'] = connParam['distributeSynsUniformly'] + if 'connRandomSecFromList' in connParam: + params['connRandomSecFromList'] = connParam['connRandomSecFromList'] + isGapJunction = 'gapJunction' in connParam # deprecated way of defining gap junction if self.params.synMechParams.isPointerConn(params['synMech']) or isGapJunction: params['preLoc'] = connParam.get('preLoc') From 48e1815838d6ed96550dcd90707a9aee4599bcd9 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 5 Apr 2024 16:09:53 +0200 Subject: [PATCH 10/34] ability to use `sec`, `loc`, `preSec` and `preLoc` from list in `connList`-type connParams (but so far, only if 
`synsPerConn == 1` - need to elaborate more general approach) --- netpyne/network/conn.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 7ab130786..16dee7130 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -840,6 +840,18 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): if 'loc' in connParam and isinstance(connParam['loc'], list): connParam['locFromList'] = list(connParam['loc']) # if delay is a list, copy to locFromList + if connParam['synsPerConn'] == 1: + if isinstance(connParam.get('sec'), list): + connParam['secFromList'] = list(connParam['sec']) + else: + pass # TODO: needs consistent handling + + # for pointer connections (e.g. gap junctions) only: + if isinstance(connParam.get('preLoc'), list): + connParam['preLocFromList'] = list(connParam['preLoc']) + if isinstance(connParam.get('preSec'), list): + connParam['preSecFromList'] = list(connParam['preSec']) + for iconn, (relativePreId, relativePostId) in enumerate(connParam['connList']): # for each postsyn cell preCellGid = orderedPreGids[relativePreId] postCellGid = orderedPostGids[relativePostId] @@ -851,7 +863,14 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): connParam['delay'] = connParam['delayFromList'][iconn] if 'locFromList' in connParam: connParam['loc'] = connParam['locFromList'][iconn] - + if 'secFromList' in connParam: + connParam['sec'] = connParam['secFromList'][iconn] + if 'preLocFromList' in connParam: + connParam['preLoc'] = connParam['preLocFromList'][iconn] + if 'preSecFromList' in connParam: + connParam['preSec'] = connParam['preSecFromList'][iconn] + + # TODO: consider cfg.allowSelfConns? 
if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection From fc37963bcb19e4b8fa62597ba022be39679defc1 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 5 Apr 2024 16:44:04 +0200 Subject: [PATCH 11/34] fixed escessive warning when estimating max pointer connections number --- netpyne/cell/compartCell.py | 2 +- netpyne/network/network.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/netpyne/cell/compartCell.py b/netpyne/cell/compartCell.py index a7c47408a..2d01956ea 100644 --- a/netpyne/cell/compartCell.py +++ b/netpyne/cell/compartCell.py @@ -1091,7 +1091,7 @@ def __generatePointerIds(self, pointerParams, params): from .. import sim # see comments in `__parsePointerParams()` for more details - if sim.net.lastPointerId > sim.net.maxPointerIdPerNode: + if sim.net.lastPointerId > sim.net.maxPointerIdForGivenNode: print(f"WARNING: potential overflow of pointer connection id!") preToPostId = sim.net.lastPointerId sim.net.lastPointerId += 1 # keep track of num of gap juncs in this node diff --git a/netpyne/network/network.py b/netpyne/network/network.py index 3a909364a..bf3b9ad5f 100644 --- a/netpyne/network/network.py +++ b/netpyne/network/network.py @@ -67,8 +67,9 @@ def __init__(self, params=None): # keep track of last gap junction gid intMax = 2**(32-1) # pointer connection id in NEURON is signed 32-bit int - self.maxPointerIdPerNode = int(intMax / sim.nhosts) - self.lastPointerId = sim.rank * self.maxPointerIdPerNode # to avoid overlap of gids from different nodes + maxPointerIdPerNode = int(intMax / sim.nhosts) + self.lastPointerId = sim.rank * maxPointerIdPerNode # to avoid overlap of gids from different nodes + self.maxPointerIdForGivenNode = self.lastPointerId + maxPointerIdPerNode # ----------------------------------------------------------------------------- # Set network params From 3c471b59d5f8494a948141d6931242ea31fcc0ee Mon Sep 17 00:00:00 2001 
From: Roman Baravalle <44242643+RomanB22@users.noreply.github.com> Date: Mon, 15 Apr 2024 12:04:48 -0500 Subject: [PATCH 12/34] Adding loading bar for net creation Adding loading bar for cell and connectivity creation using tqdm package --- netpyne/network/conn.py | 33 +++++++++++++++++++++++++++++---- netpyne/network/network.py | 10 +++++++--- 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 9710c5165..308d2a946 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -23,7 +23,7 @@ import numpy as np from array import array as arrayFast from numbers import Number - +from tqdm import tqdm # ----------------------------------------------------------------------------- # Connect Cells @@ -418,6 +418,9 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of all-to-all connections (rule: %s) ...' % (connParam['label'])) + if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (all-to-all connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -436,9 +439,11 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): } for postCellGid in postCellsTags: # for each postsyn cell + if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node's list of gids for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() # 
----------------------------------------------------------------------------- @@ -517,6 +522,9 @@ def probConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of probabilistic connections (rule: %s) ...' % (connParam['label'])) + if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (probabilistic connectivity)' % sim.rank) allRands = self.generateRandsPrePost(preCellsTags, postCellsTags) @@ -550,13 +558,14 @@ def probConn(self, preCellsTags, postCellsTags, connParam): probMatrix, allRands, connParam['disynapticBias'], prePreGids, postPreGids ) for preCellGid, postCellGid in connGids: + if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) for paramStrFunc in paramsStrFunc: # call lambda functions to get weight func args connParam[paramStrFunc + 'Args'] = { k: v if isinstance(v, Number) else v(preCellsTags[preCellGid], postCellsTags[postCellGid]) for k, v in connParam[paramStrFunc + 'Vars'].items() } self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() # standard probabilistic conenctions else: # print('rank %d'%(sim.rank)) @@ -564,6 +573,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): # calculate the conn preGids of the each pre and post cell # for postCellGid,postCellTags in sorted(postCellsTags.items()): # for each postsyn cell for postCellGid, postCellTags in postCellsTags.items(): # for each postsyn cell # for each postsyn cell + if sim.rank==0: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell probability = ( @@ -580,7 +590,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): ) # 
connParam[paramStrFunc+'Args'] = {k:v if isinstance(v, Number) else v(preCellTags,postCellTags) for k,v in connParam[paramStrFunc+'Vars'].items()} self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() # ----------------------------------------------------------------------------- # Generate random unique integers @@ -653,6 +663,9 @@ def convConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of convergent connections (rule: %s) ...' % (connParam['label'])) + if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (convergent connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -672,6 +685,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): hashPreCells = sim.hashList(preCellsTagsKeys) for postCellGid, postCellTags in postCellsTags.items(): # for each postsyn cell + if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node convergence = ( connParam['convergenceFunc'][postCellGid] @@ -704,6 +718,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() # ----------------------------------------------------------------------------- @@ -736,6 +751,9 @@ def divConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of divergent connections (rule: %s) ...' 
% (connParam['label'])) + if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(preCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} presynaptic cells on node %i (divergent connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -755,6 +773,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): hashPostCells = sim.hashList(postCellsTagsKeys) for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell + if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) divergence = ( connParam['divergenceFunc'][preCellGid] if 'divergenceFunc' in connParam else connParam['divergence'] ) # num of presyn conns / postsyn cell @@ -781,6 +800,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() # ----------------------------------------------------------------------------- @@ -813,6 +833,9 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of connections from list (rule: %s) ...' 
% (connParam['label'])) + if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(connParam['connList']), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} pairs of neurons on node %i (from list)' % sim.rank) orderedPreGids = sorted(preCellsTags) orderedPostGids = sorted(postCellsTags) @@ -841,6 +864,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): connParam['locFromList'] = list(connParam['loc']) # if delay is a list, copy to locFromList for iconn, (relativePreId, relativePostId) in enumerate(connParam['connList']): # for each postsyn cell + if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) preCellGid = orderedPreGids[relativePreId] postCellGid = orderedPostGids[relativePostId] if postCellGid in self.gid2lid: # check if postsyn is in this node's list of gids @@ -854,7 +878,8 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + # ----------------------------------------------------------------------------- # Set parameters and create connection diff --git a/netpyne/network/network.py b/netpyne/network/network.py index 3a909364a..d89169f3d 100644 --- a/netpyne/network/network.py +++ b/netpyne/network/network.py @@ -13,7 +13,7 @@ standard_library.install_aliases() from ..specs import ODict from neuron import h # import NEURON - +from tqdm import tqdm class Network(object): """ @@ -100,14 +100,18 @@ def createCells(self): print(("\nCreating network of %i cell populations on %i hosts..." 
% (len(self.pops), sim.nhosts))) self._setDiversityRanges() # update fractions for rules - + if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(self.pops.values()), ascii=True, + desc="\nCreating network of %i cell populations on %i hosts..." % (len(self.pops), sim.nhosts), + position=-1, leave=True, + bar_format='{l_bar}{bar}|') #{n_fmt}/{total_fmt} populations created on node %i' % sim.rank) for ipop in list(self.pops.values()): # For each pop instantiate the network cells (objects of class 'Cell') + if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) newCells = ipop.createCells() # create cells for this pop using Pop method self.cells.extend(newCells) # add to list of cells sim.pc.barrier() if sim.rank == 0 and sim.cfg.verbose: print(('Instantiated %d cells of population %s' % (len(newCells), ipop.tags['pop']))) - + if sim.rank == 0 and not sim.cfg.verbose: pbar.close() if self.params.defineCellShapes: self.defineCellShapes() From a629a100d4a3365de799e4264ea0c2e7ab54c550 Mon Sep 17 00:00:00 2001 From: Roman Baravalle <44242643+RomanB22@users.noreply.github.com> Date: Thu, 18 Apr 2024 10:54:16 -0400 Subject: [PATCH 13/34] Update network.py Fixing double printing of Creating network sentence --- netpyne/network/network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netpyne/network/network.py b/netpyne/network/network.py index d89169f3d..157ca219c 100644 --- a/netpyne/network/network.py +++ b/netpyne/network/network.py @@ -96,7 +96,7 @@ def createCells(self): sim.pc.barrier() sim.timing('start', 'createTime') - if sim.rank == 0: + if sim.rank == 0 and sim.cfg.verbose: print(("\nCreating network of %i cell populations on %i hosts..." 
% (len(self.pops), sim.nhosts))) self._setDiversityRanges() # update fractions for rules From aa3e4664d4587d850cc29719f8098ceb1c0c8cec Mon Sep 17 00:00:00 2001 From: James Chen Date: Fri, 19 Apr 2024 09:52:41 -0400 Subject: [PATCH 14/34] define `cellInds` before reference in `syncLines` logic hotfix, PR? --- netpyne/plotting/plotRaster.py | 1 + 1 file changed, 1 insertion(+) diff --git a/netpyne/plotting/plotRaster.py b/netpyne/plotting/plotRaster.py index e4edc256b..8269cfbbd 100644 --- a/netpyne/plotting/plotRaster.py +++ b/netpyne/plotting/plotRaster.py @@ -392,6 +392,7 @@ def color(gid, _): # add spike lines if syncLines: + cellInds = list(set(spkInds)) rasterPlotter.axis.vlines(spkTimes, 0, len(cellInds), 'red', linewidth=0.1) # add legend From 1acd3e6f7826e9f12343672684fa705d9e34f41c Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 19 Apr 2024 17:47:35 +0200 Subject: [PATCH 15/34] updated CHANGES.md --- CHANGES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 470d78e41..952e2b211 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -12,6 +12,8 @@ - preSec and preLoc are no longer lost for inverse pointer connection +- syncLines in rasterPlot restored + # Version 1.0.6 **New features** From ff8bb97b2fa88ba59333057d151abbb6d7d2d821 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Mon, 29 Apr 2024 22:46:33 +0200 Subject: [PATCH 16/34] dropped python 2 support --- netpyne/analysis/csd.py | 9 --------- netpyne/analysis/dipole.py | 11 ----------- netpyne/analysis/filter.py | 5 ----- netpyne/analysis/info.py | 9 --------- netpyne/analysis/interactive.py | 8 -------- netpyne/analysis/lfp.py | 12 ------------ netpyne/analysis/lfp_orig.py | 12 ------------ netpyne/analysis/network.py | 15 --------------- netpyne/analysis/rxd.py | 8 -------- netpyne/analysis/spikes.py | 13 ------------- netpyne/analysis/spikes_legacy.py | 14 -------------- netpyne/analysis/tools.py | 13 ------------- netpyne/analysis/traces.py | 10 ---------- 
netpyne/analysis/utils.py | 12 ------------ netpyne/analysis/wrapper.py | 8 -------- netpyne/batch/__init__.py | 7 ------- netpyne/batch/asd_parallel.py | 14 -------------- netpyne/batch/batch.py | 14 -------------- netpyne/batch/evol.py | 13 ------------- netpyne/batch/grid.py | 14 -------------- netpyne/batch/optuna_parallel.py | 14 -------------- netpyne/batch/sbi_parallel.py | 12 ------------ netpyne/batch/utils.py | 8 -------- netpyne/cell/NML2Cell.py | 8 -------- netpyne/cell/NML2SpikeSource.py | 7 ------- netpyne/cell/__init__.py | 7 ------- netpyne/cell/cell.py | 12 ------------ netpyne/cell/compartCell.py | 16 ---------------- netpyne/cell/inputs.py | 8 -------- netpyne/cell/pointCell.py | 11 ----------- netpyne/conversion/__init__.py | 8 -------- netpyne/conversion/excel.py | 14 -------------- netpyne/conversion/neuromlFormat.py | 9 --------- netpyne/conversion/neuronPyHoc.py | 12 ------------ netpyne/conversion/pythonScript.py | 9 --------- netpyne/metadata/__init__.py | 7 ------- netpyne/metadata/api.py | 10 ---------- netpyne/metadata/metadata.py | 7 ------- netpyne/network/__init__.py | 7 ------- netpyne/network/conn.py | 12 ------------ netpyne/network/modify.py | 10 ---------- netpyne/network/netrxd.py | 12 ------------ netpyne/network/network.py | 8 -------- netpyne/network/pop.py | 10 ---------- netpyne/network/shape.py | 8 -------- netpyne/network/stim.py | 8 -------- netpyne/network/subconn.py | 14 -------------- netpyne/plotting/__init__.py | 8 -------- netpyne/plotting/plotShape.py | 15 --------------- netpyne/sim/__init__.py | 9 --------- netpyne/sim/gather.py | 8 -------- netpyne/sim/load.py | 12 ------------ netpyne/sim/run.py | 12 ------------ netpyne/sim/save.py | 11 ----------- netpyne/sim/setup.py | 10 ---------- netpyne/sim/utils.py | 13 ------------- netpyne/sim/wrappers.py | 8 -------- netpyne/specs/__init__.py | 8 -------- netpyne/specs/dicts.py | 11 ----------- netpyne/specs/netParams.py | 12 ------------ netpyne/specs/simConfig.py 
| 10 ---------- netpyne/specs/utils.py | 7 ------- netpyne/support/__init__.py | 8 -------- netpyne/support/bsmart.py | 9 +-------- netpyne/support/filter.py | 5 ----- netpyne/support/morlet.py | 5 ----- netpyne/support/morphology.py | 13 ------------- netpyne/support/recxelectrode.py | 9 --------- netpyne/support/scalebar.py | 7 ------- netpyne/support/stackedBarGraph.py | 11 ----------- netpyne/tests/checks.py | 12 ------------ netpyne/tests/tests.py | 9 --------- netpyne/tests/validate_tests.py | 9 --------- 73 files changed, 1 insertion(+), 739 deletions(-) diff --git a/netpyne/analysis/csd.py b/netpyne/analysis/csd.py index ba61f7100..25cbb0ffb 100644 --- a/netpyne/analysis/csd.py +++ b/netpyne/analysis/csd.py @@ -3,15 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() - try: basestring except NameError: diff --git a/netpyne/analysis/dipole.py b/netpyne/analysis/dipole.py index 165467d4b..46c9da25c 100644 --- a/netpyne/analysis/dipole.py +++ b/netpyne/analysis/dipole.py @@ -3,24 +3,13 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - import os -from builtins import range -from builtins import round -from builtins import str try: basestring except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/filter.py b/netpyne/analysis/filter.py index 6e907cd85..2f812fc00 100644 --- a/netpyne/analysis/filter.py +++ b/netpyne/analysis/filter.py @@ -21,11 +21,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - import warnings import numpy as np 
diff --git a/netpyne/analysis/info.py b/netpyne/analysis/info.py index 76ef99d29..2871dc920 100644 --- a/netpyne/analysis/info.py +++ b/netpyne/analysis/info.py @@ -3,20 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - try: basestring except NameError: basestring = str -from builtins import zip -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/interactive.py b/netpyne/analysis/interactive.py index cf2b478b8..3079d3cf1 100644 --- a/netpyne/analysis/interactive.py +++ b/netpyne/analysis/interactive.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ try: diff --git a/netpyne/analysis/lfp.py b/netpyne/analysis/lfp.py index 56dd2b7e6..8f2c43fe4 100644 --- a/netpyne/analysis/lfp.py +++ b/netpyne/analysis/lfp.py @@ -3,23 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import range -from builtins import round -from builtins import str - try: basestring except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/lfp_orig.py b/netpyne/analysis/lfp_orig.py index e68ab66cd..d124ebc25 100644 --- a/netpyne/analysis/lfp_orig.py +++ b/netpyne/analysis/lfp_orig.py @@ -3,23 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import range -from builtins import round 
-from builtins import str - try: basestring except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/network.py b/netpyne/analysis/network.py index 068451b7f..8426feafe 100644 --- a/netpyne/analysis/network.py +++ b/netpyne/analysis/network.py @@ -3,26 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import open -from builtins import next -from builtins import range -from builtins import str - try: basestring except NameError: basestring = str -from builtins import zip - -from builtins import round -from future import standard_library -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/rxd.py b/netpyne/analysis/rxd.py index 3678d6171..75e0fcfbf 100644 --- a/netpyne/analysis/rxd.py +++ b/netpyne/analysis/rxd.py @@ -3,19 +3,11 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - try: basestring except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/spikes.py b/netpyne/analysis/spikes.py index 98fc65297..5235bc00b 100644 --- a/netpyne/analysis/spikes.py +++ b/netpyne/analysis/spikes.py @@ -3,19 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() - -from builtins import round -from builtins import open -from builtins import range - try: to_unicode = unicode except NameError: diff --git a/netpyne/analysis/spikes_legacy.py 
b/netpyne/analysis/spikes_legacy.py index 66e92bc2f..e35c2c0e8 100755 --- a/netpyne/analysis/spikes_legacy.py +++ b/netpyne/analysis/spikes_legacy.py @@ -3,25 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import dict -from builtins import round -from builtins import str - try: basestring except NameError: basestring = str -from builtins import range -from builtins import zip - -from future import standard_library -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/tools.py b/netpyne/analysis/tools.py index 9d68db49f..0a11e507e 100644 --- a/netpyne/analysis/tools.py +++ b/netpyne/analysis/tools.py @@ -3,19 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() - -from builtins import round -from builtins import open -from builtins import range - try: to_unicode = unicode except NameError: diff --git a/netpyne/analysis/traces.py b/netpyne/analysis/traces.py index 517428287..0c4b9f38f 100644 --- a/netpyne/analysis/traces.py +++ b/netpyne/analysis/traces.py @@ -3,21 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import range -from builtins import str - try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/utils.py b/netpyne/analysis/utils.py index 2b49515f0..bfc666d27 100644 --- a/netpyne/analysis/utils.py +++ b/netpyne/analysis/utils.py @@ -3,15 +3,6 @@ """ -from __future__ import print_function -from __future__ import 
division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import round -from builtins import open -from builtins import range - # required to make json saving work in Python 2/3 try: to_unicode = unicode @@ -22,9 +13,6 @@ except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/analysis/wrapper.py b/netpyne/analysis/wrapper.py index 701b0ffd2..93626d4cf 100644 --- a/netpyne/analysis/wrapper.py +++ b/netpyne/analysis/wrapper.py @@ -3,14 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ try: diff --git a/netpyne/batch/__init__.py b/netpyne/batch/__init__.py index 74580c1ae..62da2bb9b 100644 --- a/netpyne/batch/__init__.py +++ b/netpyne/batch/__init__.py @@ -3,11 +3,4 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from .batch import Batch diff --git a/netpyne/batch/asd_parallel.py b/netpyne/batch/asd_parallel.py index 8e7ccdbdd..45ef1e9a1 100644 --- a/netpyne/batch/asd_parallel.py +++ b/netpyne/batch/asd_parallel.py @@ -3,20 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import zip - -from builtins import range -from builtins import open -from builtins import str -from future import standard_library - -standard_library.install_aliases() - # required to make json saving work in Python 2/3 try: to_unicode = unicode diff --git a/netpyne/batch/batch.py b/netpyne/batch/batch.py 
index a3eac6b41..9a5de9210 100644 --- a/netpyne/batch/batch.py +++ b/netpyne/batch/batch.py @@ -3,20 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import zip - -from builtins import range -from builtins import open -from builtins import str -from future import standard_library - -standard_library.install_aliases() - # required to make json saving work in Python 2/3 try: to_unicode = unicode diff --git a/netpyne/batch/evol.py b/netpyne/batch/evol.py index e74521fce..f9622d246 100644 --- a/netpyne/batch/evol.py +++ b/netpyne/batch/evol.py @@ -3,20 +3,7 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import zip - -from builtins import range -from builtins import open -from builtins import str from ctypes import util -from future import standard_library - -standard_library.install_aliases() # required to make json saving work in Python 2/3 try: diff --git a/netpyne/batch/grid.py b/netpyne/batch/grid.py index 9d214142c..dee221e96 100644 --- a/netpyne/batch/grid.py +++ b/netpyne/batch/grid.py @@ -3,20 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import zip - -from builtins import range -from builtins import open -from builtins import str -from future import standard_library - -standard_library.install_aliases() - # required to make json saving work in Python 2/3 try: to_unicode = unicode diff --git a/netpyne/batch/optuna_parallel.py b/netpyne/batch/optuna_parallel.py index a0abee973..7af3793d2 100644 --- a/netpyne/batch/optuna_parallel.py +++ b/netpyne/batch/optuna_parallel.py @@ -3,20 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals 
-from __future__ import division -from __future__ import absolute_import - -from builtins import zip - -from builtins import range -from builtins import open -from builtins import str -from future import standard_library - -standard_library.install_aliases() - # required to make json saving work in Python 2/3 try: to_unicode = unicode diff --git a/netpyne/batch/sbi_parallel.py b/netpyne/batch/sbi_parallel.py index 5700548da..888c797e2 100644 --- a/netpyne/batch/sbi_parallel.py +++ b/netpyne/batch/sbi_parallel.py @@ -2,20 +2,8 @@ Module for SBI optimization """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import zip - -from builtins import range -from builtins import open -from builtins import str from lib2to3.pytree import NegatedPattern -from future import standard_library -standard_library.install_aliases() # required to make json saving work in Python 2/3 try: diff --git a/netpyne/batch/utils.py b/netpyne/batch/utils.py index 3ddfdc514..c77e26ea1 100644 --- a/netpyne/batch/utils.py +++ b/netpyne/batch/utils.py @@ -3,16 +3,8 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import import builtins -from future import standard_library - -standard_library.install_aliases() - import numpy as np import json import pickle diff --git a/netpyne/cell/NML2Cell.py b/netpyne/cell/NML2Cell.py index a63eb2fa3..92150a696 100644 --- a/netpyne/cell/NML2Cell.py +++ b/netpyne/cell/NML2Cell.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from copy import deepcopy from neuron import h # Import NEURON import numpy as np diff --git 
a/netpyne/cell/NML2SpikeSource.py b/netpyne/cell/NML2SpikeSource.py index b257cb687..567870c34 100644 --- a/netpyne/cell/NML2SpikeSource.py +++ b/netpyne/cell/NML2SpikeSource.py @@ -3,13 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from neuron import h # Import NEURON from .compartCell import CompartCell from ..specs import Dict diff --git a/netpyne/cell/__init__.py b/netpyne/cell/__init__.py index 9731ff6b1..74a6d1ed2 100644 --- a/netpyne/cell/__init__.py +++ b/netpyne/cell/__init__.py @@ -3,13 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from .compartCell import CompartCell from .pointCell import PointCell from .NML2Cell import NML2Cell diff --git a/netpyne/cell/cell.py b/netpyne/cell/cell.py index de12a5923..0f85b66c2 100644 --- a/netpyne/cell/cell.py +++ b/netpyne/cell/cell.py @@ -3,23 +3,11 @@ """ -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import absolute_import - - -from builtins import zip -from builtins import next -from builtins import str - try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() from numbers import Number from copy import deepcopy from neuron import h # Import NEURON diff --git a/netpyne/cell/compartCell.py b/netpyne/cell/compartCell.py index 2d01956ea..eeaeecca7 100644 --- a/netpyne/cell/compartCell.py +++ b/netpyne/cell/compartCell.py @@ -3,19 +3,6 @@ """ -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import 
absolute_import - -from builtins import super -from builtins import next -from builtins import zip -from builtins import range - -from builtins import round -from builtins import str - from netpyne.specs.netParams import CellParams, SynMechParams try: @@ -23,9 +10,6 @@ except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from numbers import Number from copy import deepcopy from neuron import h # Import NEURON diff --git a/netpyne/cell/inputs.py b/netpyne/cell/inputs.py index 8c525666b..6ea1dcd99 100644 --- a/netpyne/cell/inputs.py +++ b/netpyne/cell/inputs.py @@ -3,14 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from numbers import Number try: diff --git a/netpyne/cell/pointCell.py b/netpyne/cell/pointCell.py index 86a921b5a..5d6dcd84a 100644 --- a/netpyne/cell/pointCell.py +++ b/netpyne/cell/pointCell.py @@ -7,23 +7,12 @@ Contributors: salvadordura@gmail.com, samnemo@gmail.com """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import super -from builtins import zip -from builtins import range try: basestring except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() from copy import deepcopy from neuron import h # Import NEURON import numpy as np diff --git a/netpyne/conversion/__init__.py b/netpyne/conversion/__init__.py index ec3c4a5b9..1c3149ace 100644 --- a/netpyne/conversion/__init__.py +++ b/netpyne/conversion/__init__.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - 
-standard_library.install_aliases() - from .neuronPyHoc import importCell, importCellsFromNet, mechVarList, getSecName from .pythonScript import createPythonScript, createPythonNetParams, createPythonSimConfig from .excel import importConnFromExcel diff --git a/netpyne/conversion/excel.py b/netpyne/conversion/excel.py index 6bdffa029..86e2da5f5 100644 --- a/netpyne/conversion/excel.py +++ b/netpyne/conversion/excel.py @@ -3,20 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - - -from builtins import open -from builtins import range -from builtins import str -from future import standard_library - -standard_library.install_aliases() - - def importConnFromExcel(fileName, sheetName): """ Function for/to diff --git a/netpyne/conversion/neuromlFormat.py b/netpyne/conversion/neuromlFormat.py index 1df0d12ab..8a4b2e053 100644 --- a/netpyne/conversion/neuromlFormat.py +++ b/netpyne/conversion/neuromlFormat.py @@ -3,15 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - - -from builtins import str - -from builtins import range - try: import neuroml from pyneuroml import pynml diff --git a/netpyne/conversion/neuronPyHoc.py b/netpyne/conversion/neuronPyHoc.py index 737c5f3ac..6f10a70ca 100644 --- a/netpyne/conversion/neuronPyHoc.py +++ b/netpyne/conversion/neuronPyHoc.py @@ -3,18 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import -from builtins import range -from builtins import dict - -from builtins import zip -from builtins import str -from future import standard_library - -standard_library.install_aliases() import os, sys, signal from numbers import Number from neuron import h diff --git a/netpyne/conversion/pythonScript.py b/netpyne/conversion/pythonScript.py index 
462e29d99..9e4ec6ab0 100644 --- a/netpyne/conversion/pythonScript.py +++ b/netpyne/conversion/pythonScript.py @@ -3,15 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import open -from future import standard_library - -standard_library.install_aliases() from netpyne import __version__ diff --git a/netpyne/metadata/__init__.py b/netpyne/metadata/__init__.py index 7034d7f5a..8508d053c 100644 --- a/netpyne/metadata/__init__.py +++ b/netpyne/metadata/__init__.py @@ -3,11 +3,4 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from .metadata import metadata diff --git a/netpyne/metadata/api.py b/netpyne/metadata/api.py index 86ce34571..caf63ef55 100644 --- a/netpyne/metadata/api.py +++ b/netpyne/metadata/api.py @@ -3,16 +3,6 @@ """ - -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() - - def merge(source, destination): for key, value in list(source.items()): if isinstance(value, dict): diff --git a/netpyne/metadata/metadata.py b/netpyne/metadata/metadata.py index e4bc4d9c5..92d63a691 100644 --- a/netpyne/metadata/metadata.py +++ b/netpyne/metadata/metadata.py @@ -3,13 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() metadata = { # --------------------------------------------------------------------------------------------------------------------- # netParams diff --git 
a/netpyne/network/__init__.py b/netpyne/network/__init__.py index dee2888a3..288114749 100644 --- a/netpyne/network/__init__.py +++ b/netpyne/network/__init__.py @@ -3,12 +3,5 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from .network import Network from .pop import Pop diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 16dee7130..4c78d2f3f 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -3,23 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import dict -from builtins import range - -from builtins import round - try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() import numpy as np from array import array as arrayFast from numbers import Number diff --git a/netpyne/network/modify.py b/netpyne/network/modify.py index f9c793c03..684204315 100644 --- a/netpyne/network/modify.py +++ b/netpyne/network/modify.py @@ -3,19 +3,9 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - - # ----------------------------------------------------------------------------- # Modify cell params # ----------------------------------------------------------------------------- -from future import standard_library - -standard_library.install_aliases() - def modifyCells(self, params, updateMasterAllCells=False): """ diff --git a/netpyne/network/netrxd.py b/netpyne/network/netrxd.py index 57c5bddeb..38e04ea66 100644 --- a/netpyne/network/netrxd.py +++ b/netpyne/network/netrxd.py @@ -3,23 +3,11 @@ """ -from __future__ import print_function -from __future__ 
import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import dict -from builtins import range - -from builtins import round - try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() import copy try: diff --git a/netpyne/network/network.py b/netpyne/network/network.py index bf3b9ad5f..9a87e579b 100644 --- a/netpyne/network/network.py +++ b/netpyne/network/network.py @@ -3,14 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from ..specs import ODict from neuron import h # import NEURON diff --git a/netpyne/network/pop.py b/netpyne/network/pop.py index be868610e..f20e4416d 100644 --- a/netpyne/network/pop.py +++ b/netpyne/network/pop.py @@ -3,21 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import map -from builtins import range - try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() from numpy import pi, sqrt, sin, cos, arccos import numpy as np from neuron import h # Import NEURON diff --git a/netpyne/network/shape.py b/netpyne/network/shape.py index f09f2a1cc..8f8f1920c 100644 --- a/netpyne/network/shape.py +++ b/netpyne/network/shape.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from neuron import h # ----------------------------------------------------------------------------- diff --git a/netpyne/network/stim.py b/netpyne/network/stim.py 
index 20d002304..242bb4931 100644 --- a/netpyne/network/stim.py +++ b/netpyne/network/stim.py @@ -3,14 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from numbers import Number try: diff --git a/netpyne/network/subconn.py b/netpyne/network/subconn.py index f91b0cc07..e9b21ce8f 100644 --- a/netpyne/network/subconn.py +++ b/netpyne/network/subconn.py @@ -3,20 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import zip -from builtins import range - -from builtins import round -from builtins import next -from builtins import str -from future import standard_library - -standard_library.install_aliases() import numpy as np from neuron import h diff --git a/netpyne/plotting/__init__.py b/netpyne/plotting/__init__.py index 3576f367d..4f90ab992 100644 --- a/netpyne/plotting/__init__.py +++ b/netpyne/plotting/__init__.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() import warnings warnings.filterwarnings("ignore") diff --git a/netpyne/plotting/plotShape.py b/netpyne/plotting/plotShape.py index 3ed443c2b..60aac3a1f 100644 --- a/netpyne/plotting/plotShape.py +++ b/netpyne/plotting/plotShape.py @@ -3,26 +3,11 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import open -from builtins import next -from builtins import range -from builtins import str - try: basestring except NameError: basestring = str -from builtins import zip - 
-from builtins import round -from future import standard_library -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/sim/__init__.py b/netpyne/sim/__init__.py index d77b648f7..d43f0f007 100644 --- a/netpyne/sim/__init__.py +++ b/netpyne/sim/__init__.py @@ -5,17 +5,8 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - # check for -nogui option -from future import standard_library - -standard_library.install_aliases() import sys - if '-nogui' in sys.argv: import netpyne diff --git a/netpyne/sim/gather.py b/netpyne/sim/gather.py index 14411678c..e5872147a 100644 --- a/netpyne/sim/gather.py +++ b/netpyne/sim/gather.py @@ -3,18 +3,10 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - import os, pickle -from builtins import zip -from future import standard_library from netpyne.support.recxelectrode import RecXElectrode -standard_library.install_aliases() import numpy as np from ..specs import Dict, ODict from . 
import setup diff --git a/netpyne/sim/load.py b/netpyne/sim/load.py index 6c7a76ad9..6753ff181 100644 --- a/netpyne/sim/load.py +++ b/netpyne/sim/load.py @@ -3,15 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import open -from builtins import range - -# required to make json saving work in Python 2/3 try: to_unicode = unicode except NameError: @@ -21,9 +12,6 @@ except NameError: basestring = str -from future import standard_library - -standard_library.install_aliases() import sys from collections import OrderedDict from ..specs import Dict, ODict diff --git a/netpyne/sim/run.py b/netpyne/sim/run.py index cd3d9ced0..88544d066 100644 --- a/netpyne/sim/run.py +++ b/netpyne/sim/run.py @@ -3,18 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import round -from builtins import range - -from builtins import str -from future import standard_library - -standard_library.install_aliases() import numpy as np from neuron import h, init # Import NEURON from . 
import utils diff --git a/netpyne/sim/save.py b/netpyne/sim/save.py index 05e31ee9f..3bae90a9c 100644 --- a/netpyne/sim/save.py +++ b/netpyne/sim/save.py @@ -3,17 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import range -from builtins import open -from future import standard_library - -standard_library.install_aliases() - # required to make json saving work in Python 2/3 try: to_unicode = unicode diff --git a/netpyne/sim/setup.py b/netpyne/sim/setup.py index ea7879f2f..4c1094cdc 100644 --- a/netpyne/sim/setup.py +++ b/netpyne/sim/setup.py @@ -3,16 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -# -from builtins import str -from future import standard_library - -standard_library.install_aliases() import sys import os import numpy as np diff --git a/netpyne/sim/utils.py b/netpyne/sim/utils.py index c8af8108f..e233a4adc 100644 --- a/netpyne/sim/utils.py +++ b/netpyne/sim/utils.py @@ -3,26 +3,13 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - - -from builtins import next -from builtins import dict -from builtins import map -from builtins import str - from netpyne.support.recxelectrode import RecXElectrode try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() from time import time import hashlib import array diff --git a/netpyne/sim/wrappers.py b/netpyne/sim/wrappers.py index c1b4b7712..ed90c680c 100644 --- a/netpyne/sim/wrappers.py +++ b/netpyne/sim/wrappers.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import 
-from future import standard_library - -standard_library.install_aliases() - # ------------------------------------------------------------------------------ # Wrapper to create network # ------------------------------------------------------------------------------ diff --git a/netpyne/specs/__init__.py b/netpyne/specs/__init__.py index 464e7b367..6064f40e0 100644 --- a/netpyne/specs/__init__.py +++ b/netpyne/specs/__init__.py @@ -3,14 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import standard_library - -standard_library.install_aliases() from .dicts import Dict, ODict from .netParams import NetParams, CellParams from .simConfig import SimConfig diff --git a/netpyne/specs/dicts.py b/netpyne/specs/dicts.py index ac6ae1763..f610f9f4a 100644 --- a/netpyne/specs/dicts.py +++ b/netpyne/specs/dicts.py @@ -5,17 +5,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from builtins import range -from builtins import dict -from builtins import super -from future import standard_library - -standard_library.install_aliases() from collections import OrderedDict # ---------------------------------------------------------------------------- diff --git a/netpyne/specs/netParams.py b/netpyne/specs/netParams.py index 9a21854db..72643fe50 100644 --- a/netpyne/specs/netParams.py +++ b/netpyne/specs/netParams.py @@ -3,15 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - -from builtins import next -from builtins import open -from builtins import range - # required to make json saving work in Python 2/3 try: to_unicode = unicode @@ -23,9 +14,6 @@ except NameError: basestring = str -from future import standard_library - 
-standard_library.install_aliases() from collections import OrderedDict from .dicts import Dict, ODict from .. import conversion diff --git a/netpyne/specs/simConfig.py b/netpyne/specs/simConfig.py index 5024c43f5..45a1c475a 100644 --- a/netpyne/specs/simConfig.py +++ b/netpyne/specs/simConfig.py @@ -3,22 +3,12 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - # required to make json saving work in Python 2/3 try: to_unicode = unicode except NameError: to_unicode = str -from builtins import open -from future import standard_library - -standard_library.install_aliases() - from collections import OrderedDict from .dicts import Dict, ODict diff --git a/netpyne/specs/utils.py b/netpyne/specs/utils.py index 3107da6d8..1488dfed1 100644 --- a/netpyne/specs/utils.py +++ b/netpyne/specs/utils.py @@ -3,13 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from numbers import Number from neuron import h from numpy import array, sin, cos, tan, exp, remainder, sqrt, arctan2, pi, mean, inf, dstack, unravel_index, argsort, zeros, ceil, copy, log, log10 diff --git a/netpyne/support/__init__.py b/netpyne/support/__init__.py index 4985f96a3..b28ac292c 100644 --- a/netpyne/support/__init__.py +++ b/netpyne/support/__init__.py @@ -4,11 +4,3 @@ This subpackage contains external modules or packages required by NetPyNE which are either not available via pip or have been slightly modified. 
""" - -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() diff --git a/netpyne/support/bsmart.py b/netpyne/support/bsmart.py index 10a9dedcb..caa99f64c 100644 --- a/netpyne/support/bsmart.py +++ b/netpyne/support/bsmart.py @@ -42,15 +42,8 @@ Version: 2019jun17 by Cliff Kerr (cliff@thekerrlab.com) """ -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import absolute_import -import numpy as np -from builtins import range -from future import standard_library -standard_library.install_aliases() +import numpy as np # ARMORF -- AR parameter estimation via LWR method modified by Morf. # diff --git a/netpyne/support/filter.py b/netpyne/support/filter.py index 237801179..93cc97f18 100644 --- a/netpyne/support/filter.py +++ b/netpyne/support/filter.py @@ -21,11 +21,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - import warnings import numpy as np diff --git a/netpyne/support/morlet.py b/netpyne/support/morlet.py index a58b1b793..abbbf911d 100644 --- a/netpyne/support/morlet.py +++ b/netpyne/support/morlet.py @@ -10,11 +10,6 @@ subtract mean from time series within the wavelet class) """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - from netpyne import __gui__ if __gui__: import matplotlib.pyplot as plt diff --git a/netpyne/support/morphology.py b/netpyne/support/morphology.py index 4b2025625..67ec1bccc 100644 --- a/netpyne/support/morphology.py +++ b/netpyne/support/morphology.py @@ -3,21 +3,8 @@ """ -from __future__ import division -from __future__ import unicode_literals -from __future__ import 
print_function -from __future__ import absolute_import - # Code adapted from https://github.com/ahwillia/PyNeuron-Toolbox under MIT license - -from builtins import zip -from builtins import range - -from future import standard_library - -standard_library.install_aliases() -from builtins import object import numpy as np import pylab as plt from netpyne import __gui__ diff --git a/netpyne/support/recxelectrode.py b/netpyne/support/recxelectrode.py index 7c2a87746..42b1c2605 100644 --- a/netpyne/support/recxelectrode.py +++ b/netpyne/support/recxelectrode.py @@ -3,11 +3,6 @@ """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals -from __future__ import absolute_import - # Allen Institute Software License - This software license is the 2-clause BSD license plus clause a third # clause that prohibits redistribution for commercial purposes without further permission. # @@ -38,10 +33,6 @@ # Adapted to NetPyNE by salvadordura@gmail.com # -from builtins import range -from future import standard_library - -standard_library.install_aliases() import numpy as np import math diff --git a/netpyne/support/scalebar.py b/netpyne/support/scalebar.py index 786835f04..fe71441cd 100644 --- a/netpyne/support/scalebar.py +++ b/netpyne/support/scalebar.py @@ -9,13 +9,6 @@ # Adapted from mpl_toolkits.axes_grid1 # LICENSE: Python Software Foundation (http://docs.python.org/license.html) -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future import standard_library - -standard_library.install_aliases() from netpyne import __gui__ if __gui__: diff --git a/netpyne/support/stackedBarGraph.py b/netpyne/support/stackedBarGraph.py index 176c85bf2..b6a865f97 100644 --- a/netpyne/support/stackedBarGraph.py +++ b/netpyne/support/stackedBarGraph.py @@ -25,17 +25,6 @@ # # 
############################################################################### -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import absolute_import -from builtins import range -from builtins import str -from future import standard_library - -standard_library.install_aliases() -from builtins import object - __author__ = "Michael Imelfort; modified by salvadordura@gmail.com" __copyright__ = "Copyright 2014" __credits__ = ["Michael Imelfort"] diff --git a/netpyne/tests/checks.py b/netpyne/tests/checks.py index d02d40a0b..edd788b91 100644 --- a/netpyne/tests/checks.py +++ b/netpyne/tests/checks.py @@ -3,18 +3,6 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -# checks.py - -from future import standard_library - -standard_library.install_aliases() - - def checkOutput(modelName, verbose=False): """ Function to compare the output of tutorials and examples with their expected output diff --git a/netpyne/tests/tests.py b/netpyne/tests/tests.py index 4ee255196..c78679014 100644 --- a/netpyne/tests/tests.py +++ b/netpyne/tests/tests.py @@ -3,20 +3,11 @@ """ -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from builtins import str - try: basestring except NameError: basestring = str -from future import standard_library -standard_library.install_aliases() import unittest import numbers import sys diff --git a/netpyne/tests/validate_tests.py b/netpyne/tests/validate_tests.py index c2678abe0..63cc3edb0 100644 --- a/netpyne/tests/validate_tests.py +++ b/netpyne/tests/validate_tests.py @@ -3,15 +3,6 @@ """ -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import - -from future import 
standard_library - -standard_library.install_aliases() -from builtins import object from .tests import * import netpyne.specs as specs From e3eec49ca62b6b12f8d8a41b167e152fb4c210b6 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 3 May 2024 17:54:40 +0200 Subject: [PATCH 17/34] added tqdm (progress bar) to dependency list --- CHANGES.md | 2 ++ setup.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 952e2b211..9d51cc57e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,6 +2,8 @@ **New features** +- Added progress-bar indicating network creation progress + - cfg.connRandomSecFromList and cfg.distributeSynsUniformly can now be overriden in individual conn rule **Bug fixes** diff --git a/setup.py b/setup.py index 754ffcf13..bb877427c 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,7 @@ # your project is installed. For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/requirements.html - install_requires=["numpy", "scipy", "matplotlib", "matplotlib-scalebar", "future", "pandas", "bokeh", "schema", "lfpykit"], + install_requires=["numpy", "scipy", "matplotlib", "matplotlib-scalebar", "future", "pandas", "bokeh", "schema", "lfpykit", "tqdm"], # List additional groups of dependencies here (e.g. development # dependencies). 
You can install these using the following syntax, # for example: From 703435cdd34f6ab0f0a7453e85eacfd8555239f6 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 3 May 2024 18:02:40 +0200 Subject: [PATCH 18/34] Fixed a bug in `gatherDataFromFiles()` where cellGids for node 0 were lost --- CHANGES.md | 2 ++ netpyne/sim/gather.py | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 9d51cc57e..6881d80f9 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -16,6 +16,8 @@ - syncLines in rasterPlot restored +- Fixed a bug in `gatherDataFromFiles()` where cellGids for node 0 were lost + # Version 1.0.6 **New features** diff --git a/netpyne/sim/gather.py b/netpyne/sim/gather.py index e5872147a..566ba379a 100644 --- a/netpyne/sim/gather.py +++ b/netpyne/sim/gather.py @@ -499,9 +499,9 @@ def sort(popKeyAndValue): for key in singleNodeVecs: allSimData[key] = list(fileData['simData'][key]) allPopsCellGids = {popLabel: [] for popLabel in nodePopsCellGids} - else: - for popLabel, popCellGids in nodePopsCellGids.items(): - allPopsCellGids[popLabel].extend(popCellGids) + + for popLabel, popCellGids in nodePopsCellGids.items(): + allPopsCellGids[popLabel].extend(popCellGids) mergedFiles.append(file) From 6f16803c42d7f63365d073fb3a89a023c96a9cb7 Mon Sep 17 00:00:00 2001 From: jchen6727 Date: Sat, 11 May 2024 01:16:35 -0500 Subject: [PATCH 19/34] fixed bug in conn, updated test/examples/* to use dynamic pathing (#817) * fixed bug in conn, updated test/examples/* to use dynamic pathing * update CHANGES.md --- CHANGES.md | 2 ++ netpyne/network/conn.py | 2 +- tests/examples/test_HHTut.py | 5 +++-- tests/examples/test_HybridTut.py | 6 ++++-- tests/examples/test_LFPrecording.py | 6 +++--- tests/examples/test_M1.py | 6 +++--- tests/examples/test_NeuroMLImport.py | 6 +++--- tests/examples/test_PTcell.py | 6 +++--- tests/examples/test_batchCell.py | 6 +++--- tests/examples/test_evolCell.py | 6 +++--- tests/examples/test_rxd_buffering.py | 5 +++-- 
tests/examples/test_rxd_net.py | 6 +++--- tests/examples/test_saving.py | 6 +++--- tests/examples/utils.py | 4 +++- 14 files changed, 40 insertions(+), 32 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 6881d80f9..7c4219c2f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -6,6 +6,8 @@ - cfg.connRandomSecFromList and cfg.distributeSynsUniformly can now be overriden in individual conn rule +- Updated tests.examples.utils to allow for dynamic pathing + **Bug fixes** - Better handling of exceptions in `importCellParams()` (incl. issue 782) diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 6e006765e..079631e07 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -511,7 +511,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of probabilistic connections (rule: %s) ...' % (connParam['label'])) if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, + desc=' ' + str(connParam['label']), position=0, leave=True, bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (probabilistic connectivity)' % sim.rank) allRands = self.generateRandsPrePost(preCellsTags, postCellsTags) diff --git a/tests/examples/test_HHTut.py b/tests/examples/test_HHTut.py index c2abbe325..253ba73dc 100644 --- a/tests/examples/test_HHTut.py +++ b/tests/examples/test_HHTut.py @@ -4,9 +4,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR -@pytest.mark.package_data(['examples/HHTut/', None]) +package_dir = NETPYNE_DIR + '/examples/HHTut/' +@pytest.mark.package_data([package_dir, None]) class TestHHTut(): def test_run(self, pkg_setup): import src.init diff --git a/tests/examples/test_HybridTut.py b/tests/examples/test_HybridTut.py index 9fc96136e..1490cea87 100644 --- 
a/tests/examples/test_HybridTut.py +++ b/tests/examples/test_HybridTut.py @@ -5,9 +5,11 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR -@pytest.mark.package_data(['examples/HybridTut/', 'mod']) +package_dir = NETPYNE_DIR + '/examples/HybridTut/' + +@pytest.mark.package_data([package_dir, 'mod']) class TestHybridTut: def test_run(self, pkg_setup): import src.init diff --git a/tests/examples/test_LFPrecording.py b/tests/examples/test_LFPrecording.py index 3ec68b134..42e235d0f 100644 --- a/tests/examples/test_LFPrecording.py +++ b/tests/examples/test_LFPrecording.py @@ -5,10 +5,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(['examples/LFPrecording/', 'mod']) +package_dir = NETPYNE_DIR + '/examples/LFPrecording/' +@pytest.mark.package_data([package_dir, 'mod']) class TestLFPrecording: def test_cell_lfp(self, pkg_setup): import src.cell.init diff --git a/tests/examples/test_M1.py b/tests/examples/test_M1.py index 6af398481..981df1f3a 100644 --- a/tests/examples/test_M1.py +++ b/tests/examples/test_M1.py @@ -5,10 +5,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(['examples/M1/', 'mod']) +package_dir = NETPYNE_DIR + '/examples/M1/' +@pytest.mark.package_data([package_dir, 'mod']) class TestM1: def test_run(self, pkg_setup): import src.init diff --git a/tests/examples/test_NeuroMLImport.py b/tests/examples/test_NeuroMLImport.py index fe540b4c6..361fb864f 100644 --- a/tests/examples/test_NeuroMLImport.py +++ b/tests/examples/test_NeuroMLImport.py @@ -5,10 +5,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(['examples/NeuroMLImport/', '.']) +package_dir 
= NETPYNE_DIR + '/examples/NeuroMLImport/' +@pytest.mark.package_data([package_dir, '.']) class TestPTcell: def test_init(self, pkg_setup): import SimpleNet_import diff --git a/tests/examples/test_PTcell.py b/tests/examples/test_PTcell.py index a7599f9e0..0e3b5a793 100644 --- a/tests/examples/test_PTcell.py +++ b/tests/examples/test_PTcell.py @@ -5,10 +5,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(['examples/PTcell/', 'mod']) +package_dir = NETPYNE_DIR + '/examples/PTcell/' +@pytest.mark.package_data([package_dir, 'mod']) class TestPTcell: def test_init(self, pkg_setup): import src.init diff --git a/tests/examples/test_batchCell.py b/tests/examples/test_batchCell.py index 9ce282143..ac2898e21 100644 --- a/tests/examples/test_batchCell.py +++ b/tests/examples/test_batchCell.py @@ -8,10 +8,10 @@ if "-nogui" not in sys.argv: sys.argv.append("-nogui") -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(["examples/batchCell/", "mod"]) +package_dir = NETPYNE_DIR + "/examples/batchCell/" +@pytest.mark.package_data([package_dir, "mod"]) class TestBatchCell: def batch_run(self, pkg_setup): """run a reduced version of the batchCell example""" diff --git a/tests/examples/test_evolCell.py b/tests/examples/test_evolCell.py index cf82f05b6..8c171bf35 100644 --- a/tests/examples/test_evolCell.py +++ b/tests/examples/test_evolCell.py @@ -9,10 +9,10 @@ if "-nogui" not in sys.argv: sys.argv.append("-nogui") -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(["examples/evolCell/", "mod"]) +package_dir = NETPYNE_DIR + "/examples/evolCell/" +@pytest.mark.package_data([package_dir, "mod"]) class TestEvolCell: def evol_run(self, pkg_setup): """run a reduced version of the evolCell example""" diff --git a/tests/examples/test_rxd_buffering.py 
b/tests/examples/test_rxd_buffering.py index ce6bb14d7..dab5ab15d 100644 --- a/tests/examples/test_rxd_buffering.py +++ b/tests/examples/test_rxd_buffering.py @@ -3,9 +3,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR -@pytest.mark.package_data(['examples/rxd_buffering/', None]) +package_dir = NETPYNE_DIR + '/examples/rxd_buffering/' +@pytest.mark.package_data([package_dir, None]) class Test_rxd_buffering(): def test_buffering(self, pkg_setup): import src.init diff --git a/tests/examples/test_rxd_net.py b/tests/examples/test_rxd_net.py index 4da23a73a..0e7a9b23b 100644 --- a/tests/examples/test_rxd_net.py +++ b/tests/examples/test_rxd_net.py @@ -5,10 +5,10 @@ if '-nogui' not in sys.argv: sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(['examples/rxd_net/', 'mod']) +package_dir = NETPYNE_DIR + '/examples/rxd_net/' +@pytest.mark.package_data([package_dir, 'mod']) class TestRxdNet: def test_init(self, pkg_setup): import src.init diff --git a/tests/examples/test_saving.py b/tests/examples/test_saving.py index 451d29b71..0e3b838aa 100644 --- a/tests/examples/test_saving.py +++ b/tests/examples/test_saving.py @@ -5,10 +5,10 @@ sys.argv.append('-nogui') -from .utils import pkg_setup +from .utils import pkg_setup, NETPYNE_DIR - -@pytest.mark.package_data(['examples/saving', None]) +package_dir = NETPYNE_DIR + '/examples/saving/' +@pytest.mark.package_data([package_dir, None]) class Test_saving(): def test_init(self, pkg_setup): import src.init diff --git a/tests/examples/utils.py b/tests/examples/utils.py index 81d3ab0d3..4180a4d99 100644 --- a/tests/examples/utils.py +++ b/tests/examples/utils.py @@ -1,7 +1,10 @@ import os import pytest import sys +import inspect +import netpyne +NETPYNE_DIR = '/' + os.path.join(*inspect.getfile(netpyne).split('/')[:-2]) def compile_neuron_mod_dir(pkg_dir): try: 
print('Compiling {}'.format(pkg_dir)) @@ -14,7 +17,6 @@ def compile_neuron_mod_dir(pkg_dir): print(err) return - @pytest.fixture def pkg_setup(request): mark = request.node.get_closest_marker("package_data") From 8021936003110e3d2ea0f9250a9545f404a16401 Mon Sep 17 00:00:00 2001 From: jchen6727 Date: Mon, 13 May 2024 22:14:11 -0500 Subject: [PATCH 20/34] updating documentation (user_documentation.rst) re: new `batchtools` (beta version) (#819) * fixed bug in conn, updated test/examples/* to use dynamic pathing * update CHANGES.md * updated documentation with new batchtools (beta) --- doc/source/user_documentation.rst | 278 ++++++++++++++++++++++++++++++ 1 file changed, 278 insertions(+) diff --git a/doc/source/user_documentation.rst b/doc/source/user_documentation.rst index 8843f4c64..d46896a0f 100644 --- a/doc/source/user_documentation.rst +++ b/doc/source/user_documentation.rst @@ -2547,3 +2547,281 @@ The code for neural network optimization through evolutionary algorithm used in .. Adding cell classes .. -------------------- +Running a Batch Job (Beta) +=================== + +The NetPyNE batchtools subpackage provides a method of automating job submission and reporting:: + + + batch<-->\ /---> configuration_0 >---\ + \ / specs---\ + \<--->dispatcher_0 sim_0 + \ \ comm ---/ + \ \---< results_0 <---/ + \ + \ /---> configuration_1 >---\ + \ / specs---\ + \<--->dispatcher_1 sim_1 + \ \ comm ---/ + \ \---< results_1 <---/ + \ + \ + ... + + + +1. Setting up batchtools +----- +Beyond the necessary dependency installations for NetPyNE and NEURON, several additional `pip` installations are required. + +The NetPyNE installation should be handled as a development installation of the repository branch `batch`:: + + git clone https://github.com/Neurosim-lab/netpyne.git + cd netpyne + git checkout batch + pip install -e . 
+ +The batchtools installation either:: + + pip install -u batchtk + +or a development install (recommended):: + + git clone https://github.com/jchen6727/batchtk.git + cd batchtk + pip install -e . + +Ray is a dependency for batchtools, and should be installed with the following command:: + + pip install -u ray[default] + +2. Examples +----- +Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `here `_. + +Examples of the underlying batchtk package can be in the ``examples`` directory `here `_. + +3. Retrieving batch configuration values through the ``specs`` object +----- +Each simulation is able to retrieve relevant configurations through the ``specs`` object, and communicate with +the dispatcher through the ``comm`` object. + +importing the relevant objects:: + + from netpyne.batchtools import specs, comm + cfg = specs.SimConfig() # create a SimConfig object + netParams = specs.NetParams() # create a netParams object + +``netpyne.batchtools.specs`` behaves similarly to ``netpyne.sim.specs`` except in the following cases: + +* ``netpyne.batchtools.specs`` automatically captures relevant configuration mappings created by the ``dispatcher`` upon initialization + + * these mappings can be retrieved via ``specs.get_mappings()`` + +* the SimConfig object created by ``netpyne.batch.specs.SimConfig()`` will update itself with relevant configuration mappings through the ``update()`` method:: + + from netpyne.batchtools import specs # import the custom batch specs + cfg = specs.SimConfig() # create a SimConfig object + cfg.update() # update the cfg object with any relevant mappings for this particular batch job + +The ``update`` method will update the ``SimConfig`` object with the configuration mappings captured in ``specs`` (see: ``specs.get_mappings()``) + +This replaces the previous idiom for updating the SimConfig object with mappings from the batched job submission:: + + try: + from __main__ import cfg # import SimConfig object with params 
from parent module
+ except:
+ from cfg import cfg # if no simConfig in parent module, import directly from tut8_cfg module
+
+
+
+4. Communicating results to the ``dispatcher`` with the ``comm`` object
+-----
+
+Prior batched simulations relied on ``.pkl`` files to communicate data. The ``netpyne.batch`` subpackage uses a specific ``comm`` object to send custom data back.
+The ``comm`` object determines the method of communication based on the batch job submission type.
+
+In terms of the simulation, the following functions are available to the user:
+
+* **comm.initialize()**: establishes a connection with the batch ``dispatcher`` for sending data
+
+* **comm.send()**: sends ``data`` to the batch ``dispatcher``
+ * for ``search`` jobs, it is important to match the data sent with the metric specified in the search function
+
+* **comm.close()**: closes and cleans up the connection with the batch ``dispatcher``
+
+5. Specifying a batch job
+-----
+Batch job handling is implemented with methods from ``netpyne.batchtools.search``
+
+**search**::
+
+ def search(job_type: str, # the submission engine to run a single simulation (e.g. 'sge', 'sh')
+ comm_type: str, # the method of communication between host dispatcher and the simulation (e.g. 
'socket', 'filesystem')
+ run_config: Dict, # batch configuration, (keyword: string pairs to customize the submit template)
+ params: Dict, # search space (dictionary of parameter keys: tune search spaces)
+ algorithm: Optional[str] = "variant_generator", # search algorithm to use, see SEARCH_ALG_IMPORT for available options
+ label: Optional[str] = 'search', # label for the search
+ output_path: Optional[str] = '../batch', # directory for storing generated files
+ checkpoint_path: Optional[str] = '../ray', # directory for storing checkpoint files
+ max_concurrent: Optional[int] = 1, # number of concurrent trials to run at one time
+ batch: Optional[bool] = True, # whether concurrent trials should run synchronously or asynchronously
+ num_samples: Optional[int] = 1, # number of trials to run
+ metric: Optional[str] = "loss", # metric to optimize (this should match some key: value pair in the returned data)
+ mode: Optional[str] = "min", # either 'min' or 'max' (whether to minimize or maximize the metric)
+ algorithm_config: Optional[dict] = None, # additional configuration for the search algorithm
+ ) -> tune.ResultGrid: # results of the search
+
+The basic search implemented with the ``search`` function uses ``ray.tune`` as the search algorithm backend, returning a ``tune.ResultGrid`` which can be used to evaluate the search space and results. It takes the following parameters:
+
+* **job_type**: either "``sge``" or "``sh``", specifying how the job should be submitted, "``sge``" will submit batch jobs through the Sun Grid Engine. 
"``sh``" will submit batch jobs through the shell on a local machine
+* **comm_type**: either "``socket``" or "``filesystem``", specifying how the job should communicate with the dispatcher
+* **run_config**: a dictionary of keyword: string pairs to customize the submit template, the expected keyword: string pairs are dependent on the job_type::
+
+ =======
+ sge
+ =======
+ queue: the queue to submit the job to (#$ -q {queue})
+ cores: the number of cores to request for the job (#$ -pe smp {cores})
+ vmem: the amount of memory to request for the job (#$ -l h_vmem={vmem})
+ realtime: the amount of time to request for the job (#$ -l h_rt={realtime})
+ command: the command to run for the job
+
+ example:
+ run_config = {
+ 'queue': 'cpu.q', # request job to be run on the 'cpu.q' queue
+ 'cores': 8, # request 8 cores for the job
+ 'vmem': '8G', # request 8GB of memory for the job
+ 'realtime': '24:00:00', # set timeout of the job to 24 hours
+ 'command': 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py'
+ } # set the command to be run to 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py'
+
+ =======
+ sh
+ =======
+ command: the command to run for the job
+
+ example:
+ run_config = {
+ 'command': 'mpiexec -n 8 nrniv -python -mpi init.py'
+ } # set the command to be run
+
+* **params**: a dictionary of config values to perform the search over. The keys of the dictionary should match the keys of the config object to be updated. Lists or numpy generators of more than 2 values will force a grid search over the values; otherwise, a list of two values will create a uniform distribution sample space.
+
+ **usage 1**: updating a constant value specified in the ``SimConfig`` object ::
+
+ # take a config object with the following parameter ``foo``
+ cfg = specs.SimConfig()
+ cfg.foo = 0
+ cfg.update()
+
+ # specify a search space for ``foo`` such that a simulation will run with:
+ # cfg.foo = 0
+ # cfg.foo = 1
+ # cfg.foo = 2
+ # ... 
+ # cfg.foo = 9 + + # using: + params = { + 'foo': range(10) + } + + **usage 2**: updating a nested object in the ``SimConfig`` object:: + + # to update a nested object, the package uses the `.` operator to specify reflection into the object. + # take a config object with the following parameter object ``foo`` + cfg = specs.SimConfig() + cfg.foo = {'bar': 0, 'baz': 0} + cfg.update() + + # specify a search space for ``foo['bar']`` with `foo.bar` such that a simulation will run: + # cfg.foo['bar'] = 0 + # cfg.foo['bar'] = 1 + # cfg.foo['bar'] = 2 + # ... + # cfg.foo['bar'] = 9 + + # using: + params = { + 'foo.bar': range(10) + } + + # this reflection works with nested objects as well... + # i.e. + # cfg.foo = {'bar': {'baz': 0}} + # params = {'foo.bar.baz': range(10)} + +* **algorithm** : the search algorithm (supported within ``ray.tune``) + + **Supported algorithms**:: + + * "variant_generator": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "random": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "axe": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "bayesopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "hyperopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "bohb": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "nevergrad": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "optuna": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "hebo": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "sigopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "zoopt": optimization algorithm (see: 
https://docs.ray.io/en/latest/tune/api/suggestion.html) + +* **label**: a label for the search, used for output file naming + +* **output_path**: the directory for storing generated files, can be a relative or absolute path + +* **checkpoint_path**: the directory for storing checkpoint files in case the search needs to be restored, can be a relative or absolute path + +* **max_concurrent**: the number of concurrent trials to run at one time, it is recommended to keep in mind the resource usage of each trial to avoid overscheduling + +* **batch**: whether concurrent trials should run synchronously or asynchronously + +* **num_samples**: the number of trials to run, for any grid search, each value in the grid will be sampled ``num_samples`` times. + +* **metric**: the metric to optimize (this should match some key: value pair in the returned data) + +* **mode**: either 'min' or 'max' (whether to minimize or maximize the metric) + +* **algorithm_config**: additional configuration for the search algorithm (see the `optuna docs `_) + +6. Performing parameter optimization searches (CA3 example) +----- +The ``examples`` directory `here `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. + +In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. 
Note the search space is defined:: + + # from optuna_search.py + params = {'nmda.PYR->BC' : [1e-3, 1.8e-3], + 'ampa.PYR->BC' : [0.2e-3, 0.5e-3], + 'gaba.BC->PYR' : [0.4e-3, 1.0e-3], + } + +in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined:: + + # from grid_search.py + params = {'nmda.PYR->BC' : numpy.linspace(1e-3, 1.8e-3, 3), + 'ampa.PYR->BC' : numpy.linspace(0.2e-3, 0.5e-3, 3), + 'gaba.BC->PYR' : numpy.linspace(0.4e-3, 1.0e-3, 3), + } + +which defines ``3x3x3`` specific values to search over + +Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation:: + + # from init.py + results['PYR_loss'] = (results['PYR'] - 3.33875)**2 + results['BC_loss'] = (results['BC'] - 19.725 )**2 + results['OLM_loss'] = (results['OLM'] - 3.470 )**2 + results['loss'] = (results['PYR_loss'] + results['BC_loss'] + results['OLM_loss']) / 3 + out_json = json.dumps({**inputs, **results}) + + print(out_json) + #TODO put all of this in a single function. + comm.send(out_json) + comm.close() + +The ``out_json`` output contains a dictionary which includes the ``loss`` metric (calculated as the MSE between observed and expected values) + +In a multi-objective optimization, the relevant ``PYR_loss``, ``BC_loss``, and ``OLM_loss`` components are additionally included (see ``mo_optuna_search.py``) \ No newline at end of file From a0846bbeeac6f81be4981035463ef5b420459071 Mon Sep 17 00:00:00 2001 From: jchen6727 Date: Wed, 15 May 2024 23:17:21 -0500 Subject: [PATCH 21/34] quick fix, adding cfg.progressBar logic, fixed another issue with the loading bar. (#821) * Updated logic, bug fix cfg.progressBar = 2 will display all progress bars cfg.progressBar = 1 will call tqdm with progress bars w/ leave = 0 cfg.progressBar = 0 will disable the progress bar. 
--- CHANGES.md | 2 +- doc/source/user_documentation.rst | 13 +++---- netpyne/network/conn.py | 61 +++++++++++++++++-------------- netpyne/specs/simConfig.py | 2 +- 4 files changed, 41 insertions(+), 37 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 7c4219c2f..80ae1800b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,7 +2,7 @@ **New features** -- Added progress-bar indicating network creation progress +- Added progress-bar indicating network creation progress. Toggle the progress bar with cfg.progressBar - cfg.connRandomSecFromList and cfg.distributeSynsUniformly can now be overriden in individual conn rule diff --git a/doc/source/user_documentation.rst b/doc/source/user_documentation.rst index d46896a0f..1a7d52c31 100644 --- a/doc/source/user_documentation.rst +++ b/doc/source/user_documentation.rst @@ -2548,7 +2548,7 @@ The code for neural network optimization through evolutionary algorithm used in .. -------------------- Running a Batch Job (Beta) -=================== + The NetPyNE batchtools subpackage provides a method of automating job submission and reporting:: @@ -2571,7 +2571,7 @@ The NetPyNE batchtools subpackage provides a method of automating job submission 1. Setting up batchtools ------ + Beyond the necessary dependency installations for NetPyNE and NEURON, several additional `pip` installations are required. The NetPyNE installation should be handled as a development installation of the repository branch `batch`:: @@ -2596,13 +2596,13 @@ Ray is a dependency for batchtools, and should be installed with the following c pip install -u ray[default] 2. Examples ------ + Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `here `_. Examples of the underlying batchtk package can be in the ``examples`` directory `here `_. 3. 
Retrieving batch configuration values through the ``specs`` object ------ + Each simulation is able to retrieve relevant configurations through the ``specs`` object, and communicate with the dispatcher through the ``comm`` object. @@ -2636,7 +2636,6 @@ This replaces the previous idiom for updating the SimConfig object with mappings 4. Communicating results to the ``dispatcher`` with the ``comm`` object ------ Prior batched simulations relied on ``.pkl`` files to communicate data. The ``netpyne.batch`` subpackage uses a specific ``comm`` object to send custom data back The ``comm`` object determines the method of communication based on the batch job submission type. @@ -2651,7 +2650,7 @@ In terms of the simulation, the following functions are available to the user: * **comm.close()**: closes and cleans up the connection with the batch ``dispatcher`` 5. Specifying a batch job ------ + Batch job handling is implemented with methods from ``netpyne.batchtools.search`` **search**:: @@ -2787,7 +2786,7 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **algorithm_config**: additional configuration for the search algorithm (see the `optuna docs `_) 6. Performing parameter optimization searches (CA3 example) ------ + The ``examples`` directory `here `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined:: diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 079631e07..cd7c1f4b8 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -406,9 +406,10 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of all-to-all connections (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (all-to-all connectivity)' % sim.rank) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (all-to-all connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -427,11 +428,11 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): } for postCellGid in postCellsTags: # for each postsyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node's list of gids for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- @@ -510,9 +511,11 @@ def probConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of probabilistic connections (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + str(connParam['label']), position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (probabilistic connectivity)' % sim.rank) + + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + str(connParam['label']), position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (probabilistic connectivity)' % sim.rank) allRands = self.generateRandsPrePost(preCellsTags, postCellsTags) @@ -546,14 +549,14 @@ def probConn(self, preCellsTags, postCellsTags, connParam): probMatrix, allRands, connParam['disynapticBias'], prePreGids, postPreGids ) for preCellGid, postCellGid in connGids: - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) for paramStrFunc in paramsStrFunc: # call lambda functions to get weight func args connParam[paramStrFunc + 'Args'] = { k: v if isinstance(v, Number) else v(preCellsTags[preCellGid], postCellsTags[postCellGid]) for k, v in connParam[paramStrFunc + 'Vars'].items() } self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # standard probabilistic conenctions else: # print('rank %d'%(sim.rank)) @@ -561,7 +564,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): # calculate the conn preGids of the each pre and post cell # for postCellGid,postCellTags in sorted(postCellsTags.items()): # for each postsyn cell for postCellGid, postCellTags in postCellsTags.items(): # for each postsyn cell # for 
each postsyn cell - if sim.rank==0: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell probability = ( @@ -578,7 +581,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): ) # connParam[paramStrFunc+'Args'] = {k:v if isinstance(v, Number) else v(preCellTags,postCellTags) for k,v in connParam[paramStrFunc+'Vars'].items()} self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- # Generate random unique integers @@ -651,9 +654,10 @@ def convConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of convergent connections (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (convergent connectivity)' % sim.rank) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (convergent connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -673,7 +677,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): hashPreCells = sim.hashList(preCellsTagsKeys) for postCellGid, postCellTags in postCellsTags.items(): # for each postsyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node convergence = ( connParam['convergenceFunc'][postCellGid] @@ -706,7 +710,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- @@ -739,9 +743,10 @@ def divConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of divergent connections (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(preCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} presynaptic cells on node %i (divergent connectivity)' % sim.rank) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(preCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} presynaptic cells on node %i (divergent connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -761,7 +766,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): hashPostCells = sim.hashList(postCellsTagsKeys) for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) divergence = ( connParam['divergenceFunc'][preCellGid] if 'divergenceFunc' in connParam else connParam['divergence'] ) # num of presyn conns / postsyn cell @@ -788,7 +793,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- @@ -821,10 +826,10 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of connections from list (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(connParam['connList']), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} pairs of neurons on node %i (from list)' % sim.rank) - + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(connParam['connList']), ascii=True, + desc=' ' + connParam['label'], position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} pairs of neurons on node %i (from list)' % sim.rank) orderedPreGids = sorted(preCellsTags) orderedPostGids = sorted(postCellsTags) @@ -864,7 +869,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): connParam['preSecFromList'] = list(connParam['preSec']) for iconn, (relativePreId, relativePostId) in enumerate(connParam['connList']): # for each postsyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) preCellGid = orderedPreGids[relativePreId] postCellGid = orderedPostGids[relativePostId] if postCellGid in self.gid2lid: # check if postsyn is in this node's list of gids @@ -885,7 +890,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): # TODO: consider cfg.allowSelfConns? 
if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- diff --git a/netpyne/specs/simConfig.py b/netpyne/specs/simConfig.py index 45a1c475a..99018c2fa 100644 --- a/netpyne/specs/simConfig.py +++ b/netpyne/specs/simConfig.py @@ -72,7 +72,7 @@ def __init__(self, simConfigDict=None): self.printPopAvgRates = False # print population avg firing rates after run self.printSynsAfterRule = False # print total of connections after each conn rule is applied self.verbose = False # show detailed messages - + self.progressBar = 2 # (0: no progress bar; 1: progress bar w/ leave = False; 2: progress bar w/ leave = True) # Recording self.recordCells = [] # what cells to record traces from (eg. 'all', 5, or 'PYR') self.recordTraces = {} # Dict of traces to record From 8c8192507aeafc10025c569a31c01468d2dcdbe0 Mon Sep 17 00:00:00 2001 From: Jacob Sprouse <95829867+Jsprouse0@users.noreply.github.com> Date: Mon, 3 Jun 2024 13:10:45 -0400 Subject: [PATCH 22/34] updated mkdir to makedirs (bug fix) -- note the exist_ok change for later PR --- netpyne/batch/utils.py | 8 +++++--- netpyne/sim/save.py | 29 +++++++++++++++++------------ netpyne/specs/netParams.py | 9 +++++---- netpyne/specs/simConfig.py | 9 +++++---- 4 files changed, 32 insertions(+), 23 deletions(-) diff --git a/netpyne/batch/utils.py b/netpyne/batch/utils.py index c77e26ea1..2fcb79a4b 100644 --- a/netpyne/batch/utils.py +++ b/netpyne/batch/utils.py @@ -28,11 +28,13 @@ def createFolder(folder): import os + # If file path does not exist, it will create the file path (parent and sub-directories) if not os.path.exists(folder): try: - os.mkdir(folder) - except OSError: - print(' Could not create %s' % (folder)) + os.makedirs(folder) 
+ except OSError as e: + print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) # ------------------------------------------------------------------------------- diff --git a/netpyne/sim/save.py b/netpyne/sim/save.py index 3bae90a9c..c420ee332 100644 --- a/netpyne/sim/save.py +++ b/netpyne/sim/save.py @@ -103,12 +103,15 @@ def saveData(include=None, filename=None, saveLFP=True): print(('Copying cfg file %s ... ' % simName)) source = sim.cfg.backupCfgFile[0] targetFolder = sim.cfg.backupCfgFile[1] - # make dir + + # make directories required to make the target folder try: - os.mkdir(targetFolder) - except OSError: + os.makedirs(targetFolder) + except OSError as e: if not os.path.exists(targetFolder): - print(' Could not create target folder: %s' % (targetFolder)) + print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create target folder: %s' % (targetFolder)) + # copy file targetFile = targetFolder + '/' + simName + '_cfg.py' if os.path.exists(targetFile): @@ -116,13 +119,14 @@ def saveData(include=None, filename=None, saveLFP=True): os.system('rm ' + targetFile) os.system('cp ' + source + ' ' + targetFile) - # create folder if missing + # create the missing folder & directory for folder if one or both are missing targetFolder = os.path.dirname(sim.cfg.filename) if targetFolder and not os.path.exists(targetFolder): try: - os.mkdir(targetFolder) - except OSError: - print(' Could not create target folder: %s' % (targetFolder)) + os.makedirs(targetFolder) + except OSError as e: + print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create target folder: %s' % (targetFolder)) # saving data if not include: @@ -175,13 +179,14 @@ def saveData(include=None, filename=None, saveLFP=True): if hasattr(sim.cfg, 'simLabel') and sim.cfg.simLabel: filePath = os.path.join(sim.cfg.saveFolder, sim.cfg.simLabel + '_data' + timestampStr) - # create folder if 
missing + # make directories for the target folder if they do not already exist targetFolder = os.path.dirname(filePath) if targetFolder and not os.path.exists(targetFolder): try: - os.mkdir(targetFolder) - except OSError: - print(' Could not create target folder: %s' % (targetFolder)) + os.makedirs(targetFolder) + except OSError as e: + print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create target folder: %s' % (targetFolder)) # Save to pickle file if sim.cfg.savePickle: diff --git a/netpyne/specs/netParams.py b/netpyne/specs/netParams.py index 72643fe50..df127d119 100644 --- a/netpyne/specs/netParams.py +++ b/netpyne/specs/netParams.py @@ -547,12 +547,13 @@ def save(self, filename): folder = filename.split(basename)[0] ext = basename.split('.')[1] - # make dir + # make directories if they do not already exist: try: - os.mkdir(folder) - except OSError: + os.makedirs(folder) + except OSError as e: if not os.path.exists(folder): - print(' Could not create', folder) + print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) dataSave = {'net': {'params': self.todict()}} diff --git a/netpyne/specs/simConfig.py b/netpyne/specs/simConfig.py index 99018c2fa..44de77c4c 100644 --- a/netpyne/specs/simConfig.py +++ b/netpyne/specs/simConfig.py @@ -145,12 +145,13 @@ def save(self, filename): folder = filename.split(basename)[0] ext = basename.split('.')[1] - # make dir + # make directories if they do not already exist: try: - os.mkdir(folder) - except OSError: + os.makedirs(folder) + except OSError as e: if not os.path.exists(folder): - print(' Could not create', folder) + print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) dataSave = {'simConfig': self.__dict__} From bd4f89786f7612b73cea48e662407136d4bc9ba1 Mon Sep 17 00:00:00 2001 From: yubelyrn Date: Thu, 20 Jun 2024 12:02:01 -0500 Subject: [PATCH 23/34] Add examples of 
recording from synaptic currents mechanisms in the documentation --- doc/source/user_documentation.rst | 59 +++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/doc/source/user_documentation.rst b/doc/source/user_documentation.rst index 1a7d52c31..338e6ac2a 100644 --- a/doc/source/user_documentation.rst +++ b/doc/source/user_documentation.rst @@ -1183,6 +1183,65 @@ From a specific section and location, you can record section variables such as v # is equivalent to recording soma.myPP.V in NEURON. simConfig.recordTraces['VmyPP'] = {'sec': 'soma', 'pointp': 'myPP', 'var': 'V'} + ## Recording from Synaptic Currents Mechanisms + + # Example of recording from an excitatory synaptic mechanism + # record the 'i' variable (current) from an excitatory synaptic mechanism located + # in the middle of the 'dend' section. This is equivalent to recording + # dend(0.5).exc._ref_i in NEURON. + simConfig.recordTraces['iExcSyn'] = {'sec': 'dend', 'loc': 0.5, 'synMech': 'exc', 'var': 'i'} + + # Example of recording from an inhibitory synaptic mechanism + # record the 'i' variable (current) from an inhibitory synaptic mechanism located + # at 0.3 of the 'soma' section. This is equivalent to recording + # soma(0.3).inh._ref_i in NEURON. 
+ simConfig.recordTraces['iInhSyn'] = {'sec': 'soma', 'loc': 0.3, 'synMech': 'inh', 'var': 'i'} + + # Example of recording multiple synaptic currents + # Recording synaptic currents + simConfig.recordSynapticCurrents = True + synaptic_curr = [ + ('AMPA', 'i'), # Excitatory synaptic current + ('NMDA', 'i'), # Excitatory synaptic current + ('GABA_A', 'i') # Inhibitory synaptic current + ] + if simConfig.recordSynapticCurrents: + for syn_curr in synaptic_curr: + trace_label = f'i__soma_0__{syn_curr[0]}__{syn_curr[1]}' + simConfig.recordTraces.update({trace_label: {'sec': 'soma_0', 'loc': 0.5, 'mech': syn_curr[0], 'var': syn_curr[1]}}) + +The names ``'iExcSyn'`` , ``'iInhSyn'`` , ``'AMPA'`` , ``'NMDA'`` , ``'GABA_A'`` are those defined by the user in ``netParams.synMechParams``, and can be found using ``netParams.synMechParams.keys()``. The variables that can be paired with the synaptic mechanism in ``synaptic_curr`` tuples can be found by inspecting the MOD file that is used to define that synaptic mechanism, and the ones that can be recorded are defined as ``RANGE`` type in the MOD file. + +Example: + +* A synaptic mechanism defined in ``netParams.synMechParamsas`` as ``'AMPA'`` that uses the ``MyExp2SynBB.mod`` template. + + * The user can go to the source ``/mod`` folder, and open the ``MyExp2SynBB.mod`` file, to inspect which variables are defined as ``RANGE``, and those can be recorded. + * The user can also modify the variable type, and define as ``RANGE``, and then recompile the mechanism to make it recordable in netpyne. + +* Example of variables that can be recorded for the given file + + * The user can record ``'tau1'``, ``'tau2'``, ``'e'``, ``'i'``, ``'g'``, ``'Vwt'``, ``'gmax'``. + + +.. code-block:: python + + : $Id: MyExp2SynBB.mod,v 1.4 2010/12/13 21:27:51 samn Exp $ + NEURON { + : THREADSAFE + POINT_PROCESS MyExp2SynBB + RANGE tau1, tau2, e, i, g, Vwt, gmax + NONSPECIFIC_CURRENT i + } + +Those should be specified in ``synaptic_curr`` as: + +.. 
code-block:: python + + synaptic_curr = [ + ('AMPA', 'i'), # Excitatory synaptic current + ('AMPA', 'g'), # Channel conductance + ] .. _package_functions: From 560e5fb5bb1a14ae6823887e8504ae08d276ffef Mon Sep 17 00:00:00 2001 From: yubelyrn Date: Tue, 2 Jul 2024 20:40:29 -0500 Subject: [PATCH 24/34] Remove saveFileStep parameter and its references --- examples/HHTut/src/HHTut.py | 1 - examples/HybridTut/src/cfg.py | 1 - examples/M1/src/cfg.py | 1 - examples/RL_arm/params.py | 1 - netpyne/tests/validate_tests.py | 1 - sdnotes.org | 1 - 6 files changed, 6 deletions(-) diff --git a/examples/HHTut/src/HHTut.py b/examples/HHTut/src/HHTut.py index 9ff40e99a..2d06c28e0 100644 --- a/examples/HHTut/src/HHTut.py +++ b/examples/HHTut/src/HHTut.py @@ -70,7 +70,6 @@ # Saving simConfig.filename = 'HHTut' # Set file output name -simConfig.saveFileStep = 1000 # step size in ms to save data to disk simConfig.savePickle = False # Whether or not to write spikes etc. to a .mat file simConfig.saveJson = True diff --git a/examples/HybridTut/src/cfg.py b/examples/HybridTut/src/cfg.py index 3000a52a0..ab178d5c2 100644 --- a/examples/HybridTut/src/cfg.py +++ b/examples/HybridTut/src/cfg.py @@ -23,7 +23,6 @@ # Saving cfg.filename = 'mpiHybridTut' # Set file output name -cfg.saveFileStep = 1000 # step size in ms to save data to disk cfg.savePickle = False # Whether or not to write spikes etc. to a .mat file cfg.saveJson = False # Whether or not to write spikes etc. to a .mat file cfg.saveMat = False # Whether or not to write spikes etc. 
to a .mat file diff --git a/examples/M1/src/cfg.py b/examples/M1/src/cfg.py index 89238f443..5c61b8684 100644 --- a/examples/M1/src/cfg.py +++ b/examples/M1/src/cfg.py @@ -34,7 +34,6 @@ # Saving cfg.filename = 'data/M1_ynorm_izhi' # Set file output name -cfg.saveFileStep = 1000 # step size in ms to save data to disk cfg.savePickle = False # save to pickle file cfg.saveJson = False # save to json file cfg.saveMat = False # save to mat file diff --git a/examples/RL_arm/params.py b/examples/RL_arm/params.py index a863d05ae..cf56ffe75 100644 --- a/examples/RL_arm/params.py +++ b/examples/RL_arm/params.py @@ -308,7 +308,6 @@ # Saving simConfig.filename = 'simdata' # Set file output name -simConfig.saveFileStep = 1000 # step size in ms to save data to disk simConfig.savePickle = True # Whether or not to write spikes etc. to a .mat file simConfig.saveJson = False # Whether or not to write spikes etc. to a .mat file simConfig.saveMat = False # Whether or not to write spikes etc. to a .mat file diff --git a/netpyne/tests/validate_tests.py b/netpyne/tests/validate_tests.py index 63cc3edb0..d4213c5c9 100644 --- a/netpyne/tests/validate_tests.py +++ b/netpyne/tests/validate_tests.py @@ -69,7 +69,6 @@ def loadSimConfigTests(self): # # # Saving # simConfigParams.simConfig.saveJson=1 - # simConfigParams.simConfig.saveFileStep = simConfigParams.simConfig.dt # step size in ms to save data to disk # # self.paramsMap["simConfig"]["durationTest"].append(simConfigParams) diff --git a/sdnotes.org b/sdnotes.org index 0e2c18667..0dae07748 100644 --- a/sdnotes.org +++ b/sdnotes.org @@ -7135,7 +7135,6 @@ simConfig.recordStep = 0.1 # Step size in ms to save data (eg. V traces, LFP, et # Saving simConfig.filename = 'HHTut' # Set file output name #simConfig.Label = 'sim1' -simConfig.saveFileStep = 1000 # step size in ms to save data to disk simConfig.savePickle = False # Whether or not to write spikes etc. 
to a .mat file simConfig.recordStim = True simConfig.saveJson = True # save to json file From 2ca43d0a3f2587c64becd6d5b3b815d0c12f846d Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 5 Jul 2024 14:03:05 +0200 Subject: [PATCH 25/34] fixed generating rhythmic spiking pattern with 'uniform' option --- CHANGES.md | 2 ++ netpyne/cell/inputs.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 80ae1800b..ec8313735 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -20,6 +20,8 @@ - Fixed a bug in `gatherDataFromFiles()` where cellGids for node 0 were lost +- Fixed generating rhythmic spiking pattern with 'uniform' option + # Version 1.0.6 **New features** diff --git a/netpyne/cell/inputs.py b/netpyne/cell/inputs.py index 6ea1dcd99..53109e4dc 100644 --- a/netpyne/cell/inputs.py +++ b/netpyne/cell/inputs.py @@ -89,7 +89,9 @@ def createRhythmicPattern(params, rand): # Uniform Distribution elif distribution == 'uniform': n_inputs = params['repeats'] * freq * (stop - start) / 1000.0 - t_array = rand.uniform(start, stop, int(n_inputs)) + rand.uniform(start, stop) + vec = h.Vector(int(n_inputs)) + t_array = np.array(vec.setrand(rand)) if eventsPerCycle == 2: # Two arrays store doublet times t_input_low = t_array - 5 From 5c1737f63dba8af8880c0a1bfee15cc8a8bc0317 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Fri, 5 Jul 2024 14:16:33 +0200 Subject: [PATCH 26/34] updated link to installation instructions --- doc/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build.py b/doc/build.py index 3a3e6b5ff..75f572aff 100644 --- a/doc/build.py +++ b/doc/build.py @@ -51,7 +51,7 @@ 13) Announce the new release 13a) New release announcement text: NetPyNE v#.#.# is now available. 
For a complete list of changes and bug fixes see: https://github.com/suny-downstate-medical-center/netpyne/releases/tag/v#.#.# - See here for instructions to install or update to the latest version: http://www.netpyne.org/install.html + See here for instructions to install or update to the latest version: https://www.netpyne.org/documentation/installation 13b) Announce on NEURON forum: https://www.neuron.yale.edu/phpBB/viewtopic.php?f=45&t=3685&sid=9c380fe3a835babd47148c81ae71343e 13c) Announce to Google group: From b68c2cb464be8ffca544abfb4c16515e90ae9408 Mon Sep 17 00:00:00 2001 From: vvbragin Date: Thu, 11 Jul 2024 11:55:09 +0200 Subject: [PATCH 27/34] fixed misleading console output when cfg.recordStims is On --- CHANGES.md | 2 ++ netpyne/sim/setup.py | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index ec8313735..3cc7a9c08 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -22,6 +22,8 @@ - Fixed generating rhythmic spiking pattern with 'uniform' option +- Fixed misleading console output when cfg.recordStims is On + # Version 1.0.6 **New features** diff --git a/netpyne/sim/setup.py b/netpyne/sim/setup.py index 4c1094cdc..cc8646726 100644 --- a/netpyne/sim/setup.py +++ b/netpyne/sim/setup.py @@ -463,6 +463,8 @@ def setupRecording(): break if sim.cfg.recordStim: + if sim.cfg.verbose: + print(" Recording stims") sim.simData['stims'] = Dict() for cell in sim.net.cells: cell.recordStimSpikes() @@ -495,6 +497,8 @@ def setupRecording(): # record h.t if sim.cfg.recordTime and len(sim.simData) > 0: + if sim.cfg.verbose: + print(" Recording h.t") try: sim.simData['t'] = h.Vector() # sim.cfg.duration/sim.cfg.recordStep+1).resize(0) if hasattr(sim.cfg, 'use_local_dt') and sim.cfg.use_local_dt: @@ -510,7 +514,8 @@ def setupRecording(): # print recorded traces cat = 0 total = 0 - for key in sim.simData: + keys = [k for k in sim.simData.keys() if k not in ['t', 'stims', 'spkt', 'spkid']] + for key in keys: if sim.cfg.verbose: 
print((" Recording: %s:" % key)) if len(sim.simData[key]) > 0: From 20f991c1e45410119072d2edebec1aa0bf7da3fe Mon Sep 17 00:00:00 2001 From: Christian O'Reilly Date: Tue, 6 Aug 2024 19:28:24 -0400 Subject: [PATCH 28/34] Update scalebar.py Removing the minimumdescent=False from the call to TextArea --- netpyne/support/scalebar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netpyne/support/scalebar.py b/netpyne/support/scalebar.py index fe71441cd..6f6af8890 100644 --- a/netpyne/support/scalebar.py +++ b/netpyne/support/scalebar.py @@ -53,7 +53,7 @@ def __init__( bars.add_artist(Rectangle((0, 0), 0, sizey, ec=barcolor, lw=barwidth, fc="none")) if sizex and labelx: - self.xlabel = TextArea(labelx, minimumdescent=False) + self.xlabel = TextArea(labelx) bars = VPacker(children=[bars, self.xlabel], align="center", pad=0, sep=sep) if sizey and labely: self.ylabel = TextArea(labely) From 9c63297d5840828def334e00e56e75689879d56b Mon Sep 17 00:00:00 2001 From: vvbragin Date: Wed, 14 Aug 2024 10:05:10 +0200 Subject: [PATCH 29/34] fix in fromListConn --- netpyne/network/conn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index cd7c1f4b8..033f5298f 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -856,7 +856,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): if 'loc' in connParam and isinstance(connParam['loc'], list): connParam['locFromList'] = list(connParam['loc']) # if delay is a list, copy to locFromList - if connParam['synsPerConn'] == 1: + if connParam.get('synsPerConn', 1) == 1: if isinstance(connParam.get('sec'), list): connParam['secFromList'] = list(connParam['sec']) else: From 91d8837aae9571bffd08ba080df484151ecd0044 Mon Sep 17 00:00:00 2001 From: jchen6727 Date: Thu, 29 Aug 2024 20:53:13 -0500 Subject: [PATCH 30/34] merge of batchtools into the development repository. 
(#829) * batch tools subpackage merge into dev, see PR #818 #822 #826 #827 see documentation --- doc/source/user_documentation.rst | 287 ++++++++++-- netpyne/batch/utils.py | 12 +- netpyne/batchtools/__init__.py | 27 ++ netpyne/batchtools/analysis.py | 55 +++ netpyne/batchtools/comm.py | 48 ++ netpyne/batchtools/docs/batchtools.ipynb | 314 +++++++++++++ netpyne/batchtools/docs/batchtools.rst | 435 ++++++++++++++++++ netpyne/batchtools/evol.py | 6 + netpyne/batchtools/examples/CA3/README.md | 0 netpyne/batchtools/examples/CA3/cfg.py | 48 ++ .../batchtools/examples/CA3/grid_search.py | 41 ++ netpyne/batchtools/examples/CA3/init.py | 27 ++ .../examples/CA3/mo_optuna_search.py | 50 ++ netpyne/batchtools/examples/CA3/mod/CA1ih.mod | 64 +++ .../batchtools/examples/CA3/mod/CA1ika.mod | 85 ++++ .../batchtools/examples/CA3/mod/CA1ikdr.mod | 60 +++ .../batchtools/examples/CA3/mod/CA1ina.mod | 89 ++++ .../examples/CA3/mod/MyExp2SynBB.mod | 67 +++ .../examples/CA3/mod/MyExp2SynNMDABB.mod | 108 +++++ .../batchtools/examples/CA3/mod/aux_fun.inc | 43 ++ .../batchtools/examples/CA3/mod/caolmw.mod | 47 ++ .../batchtools/examples/CA3/mod/icaolmw.mod | 51 ++ .../batchtools/examples/CA3/mod/iholmw.mod | 60 +++ .../batchtools/examples/CA3/mod/kcaolmw.mod | 52 +++ .../batchtools/examples/CA3/mod/kdrbwb.mod | 76 +++ .../batchtools/examples/CA3/mod/nafbwb.mod | 81 ++++ netpyne/batchtools/examples/CA3/netParams.py | 321 +++++++++++++ .../batchtools/examples/CA3/optuna_search.py | 39 ++ .../examples/categorical_strings/batch.py | 18 + .../categorical_strings/categorical.py | 26 ++ .../batchtools/examples/cfg_loading/batch.py | 18 + .../examples/cfg_loading/categorical.csv | 13 + .../examples/cfg_loading/categorical.py | 13 + .../examples/jupyter/batchtools.ipynb | 314 +++++++++++++ .../rosenbrock/basic_rosenbrock/batch.py | 21 + .../rosenbrock/basic_rosenbrock/cma_batch.py | 31 ++ .../rosenbrock/basic_rosenbrock/rosenbrock.py | 37 ++ .../rosenbrock/coupled_rosenbrock/batch.py | 24 + 
.../coupled_rosenbrock/rosenbrock.py | 41 ++ .../rosenbrock/fanova_rosenbrock/analysis.py | 6 + .../rosenbrock/fanova_rosenbrock/batch.py | 24 + .../fanova_rosenbrock/rosenbrock.py | 49 ++ .../rosenbrock/nested_rosenbrock/batch.py | 23 + .../nested_rosenbrock/rosenbrock.py | 41 ++ netpyne/batchtools/header.py | 0 netpyne/batchtools/runners.py | 266 +++++++++++ netpyne/batchtools/search.py | 430 +++++++++++++++++ netpyne/batchtools/submits.py | 167 +++++++ netpyne/batchtools/tests/gain.json | 52 +++ netpyne/batchtools/tests/map_example1.py | 17 + netpyne/batchtools/tests/map_example2.py | 34 ++ netpyne/batchtools/tests/test_map.py | 67 +++ netpyne/batchtools/tests/test_trial.py | 2 + netpyne/sim/save.py | 54 +-- netpyne/specs/netParams.py | 9 +- netpyne/specs/simConfig.py | 28 +- 56 files changed, 4341 insertions(+), 77 deletions(-) create mode 100644 netpyne/batchtools/__init__.py create mode 100644 netpyne/batchtools/analysis.py create mode 100644 netpyne/batchtools/comm.py create mode 100644 netpyne/batchtools/docs/batchtools.ipynb create mode 100644 netpyne/batchtools/docs/batchtools.rst create mode 100644 netpyne/batchtools/evol.py create mode 100644 netpyne/batchtools/examples/CA3/README.md create mode 100644 netpyne/batchtools/examples/CA3/cfg.py create mode 100644 netpyne/batchtools/examples/CA3/grid_search.py create mode 100644 netpyne/batchtools/examples/CA3/init.py create mode 100644 netpyne/batchtools/examples/CA3/mo_optuna_search.py create mode 100644 netpyne/batchtools/examples/CA3/mod/CA1ih.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/CA1ika.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/CA1ikdr.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/CA1ina.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/MyExp2SynBB.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/MyExp2SynNMDABB.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/aux_fun.inc create mode 100644 
netpyne/batchtools/examples/CA3/mod/caolmw.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/icaolmw.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/iholmw.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/kcaolmw.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/kdrbwb.mod create mode 100644 netpyne/batchtools/examples/CA3/mod/nafbwb.mod create mode 100644 netpyne/batchtools/examples/CA3/netParams.py create mode 100644 netpyne/batchtools/examples/CA3/optuna_search.py create mode 100644 netpyne/batchtools/examples/categorical_strings/batch.py create mode 100644 netpyne/batchtools/examples/categorical_strings/categorical.py create mode 100644 netpyne/batchtools/examples/cfg_loading/batch.py create mode 100644 netpyne/batchtools/examples/cfg_loading/categorical.csv create mode 100644 netpyne/batchtools/examples/cfg_loading/categorical.py create mode 100644 netpyne/batchtools/examples/jupyter/batchtools.ipynb create mode 100644 netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/cma_batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/analysis.py create mode 100644 netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/header.py create mode 100644 netpyne/batchtools/runners.py create mode 100644 netpyne/batchtools/search.py create mode 100644 
netpyne/batchtools/submits.py create mode 100644 netpyne/batchtools/tests/gain.json create mode 100644 netpyne/batchtools/tests/map_example1.py create mode 100644 netpyne/batchtools/tests/map_example2.py create mode 100644 netpyne/batchtools/tests/test_map.py create mode 100644 netpyne/batchtools/tests/test_trial.py diff --git a/doc/source/user_documentation.rst b/doc/source/user_documentation.rst index 1a7d52c31..11f4ded24 100644 --- a/doc/source/user_documentation.rst +++ b/doc/source/user_documentation.rst @@ -2547,11 +2547,14 @@ The code for neural network optimization through evolutionary algorithm used in .. Adding cell classes .. -------------------- -Running a Batch Job (Beta) +Running a Batch Job +=================== +The NetPyNE batchtools subpackage provides a method of automating job submission and reporting -The NetPyNE batchtools subpackage provides a method of automating job submission and reporting:: +A diagram of the object interfaces... +:: batch<-->\ /---> configuration_0 >---\ \ / specs---\ @@ -2568,24 +2571,39 @@ The NetPyNE batchtools subpackage provides a method of automating job submission \ ... +While objects and interfaces can be handled directly, batchtools offers simple wrapper commands applicable to most use-cases, where +automatic parameter searches can be done by specifying a search space and algorithm through `netpyne.batchtools.search`, and +parameter to model translation and result communication is handled through `netpyne.batchtools.specs` and `netpyne.batchtools.comm` respectively. +A diagram of the wrapper interactions... -1. Setting up batchtools +:: -Beyond the necessary dependency installations for NetPyNE and NEURON, several additional `pip` installations are required. 
+ netpyne.batchtools.search.search( ) ----------------------------\ host + | | + | search( ) | + ============================================================================================== + | comm.initialize( ) + | comm.send( ) + | cfg = netpyne.batchtools.specs.SimConfig( ) comm.close( ) + | | ^ ^ + v v | | + cfg.update_cfg() ----------------------------------------/ | + | + send( ) netpyne.batchtools.comm( ) + simulation -The NetPyNE installation should be handled as a development installation of the repository branch `batch`:: +1. Setting up batchtools +------------------------ +Beyond the necessary dependency installations for NetPyNE and NEURON, the following `pip` installations are preferred. + +The NetPyNE installation should be handled as a development installation to allow for up to date fixes:: git clone https://github.com/Neurosim-lab/netpyne.git cd netpyne - git checkout batch pip install -e . -The batchtools installation either:: - - pip install -u batchtk - -or a development install (recommended):: +A development install of the batchtools subpackage:: git clone https://github.com/jchen6727/batchtk.git cd batchtk @@ -2593,23 +2611,25 @@ or a development install (recommended):: Ray is a dependency for batchtools, and should be installed with the following command:: - pip install -u ray[default] + pip install -U ray[default] 2. Examples +----------- +Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `on the NetPyNE github `_. -Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `here `_. - -Examples of the underlying batchtk package can be in the ``examples`` directory `here `_. +Examples of the underlying batchtk package can be in the ``examples`` directory `on the batchtk github `_. 3. 
Retrieving batch configuration values through the ``specs`` object - +--------------------------------------------------------------------- Each simulation is able to retrieve relevant configurations through the ``specs`` object, and communicate with the dispatcher through the ``comm`` object. -importing the relevant objects:: +importing the relevant objects + +.. code-block:: python from netpyne.batchtools import specs, comm - cfg = specs.SimConfig() # create a SimConfig object + cfg = specs.SimConfig() # create a SimConfig object, can be provided with a dictionary on initial call to set initial values netParams = specs.NetParams() # create a netParams object ``netpyne.batchtools.specs`` behaves similarly to ``netpyne.sim.specs`` except in the following cases: @@ -2618,24 +2638,76 @@ importing the relevant objects:: * these mappings can be retrieved via ``specs.get_mappings()`` -* the SimConfig object created by ``netpyne.batch.specs.SimConfig()`` will update itself with relevant configuration mappings through the ``update()`` method:: +* the SimConfig object created by ``netpyne.batch.specs.SimConfig()`` will update itself with relevant configuration mappings through the ``update()`` method + +.. 
code-block:: python from netpyne.batchtools import specs # import the custom batch specs cfg = specs.SimConfig() # create a SimConfig object cfg.update() # update the cfg object with any relevant mappings for this particular batch job -The ``update`` method will update the ``SimConfig`` object with the configuration mappings captured in ``specs`` (see: ``specs.get_mappings()``) +The ``update`` method will update the ``SimConfig`` object ``first`` with values supplied in the argument call, and ``then`` with the configuration mappings captured in ``specs`` (see: ``specs.get_mappings()``) -This replaces the previous idiom for updating the SimConfig object with mappings from the batched job submission:: +This replaces the previous idiom for updating the SimConfig object with mappings from the batched job submission +.. code-block:: python try: from __main__ import cfg # import SimConfig object with params from parent module except: from cfg import cfg # if no simConfig in parent module, import directly from tut8_cfg module +4. Additional functionality within the simConfig object +------------------------------------------------------- +Rather than handling custom ``SimConfig`` object attribute declaration through the ``batch`` ``initCfg`` argument, the new NetPyNE batchtools subpackage moves the custom declaration of ``SimConfig`` attributes to the actual ``SimConfig`` object, allowing them both during instantiation of the object as well as when calling ``cfg.update()`` -4. Communicating results to the ``dispatcher`` with the ``comm`` object +.. 
code-block:: python + + from netpyne.batchtools import specs # import the custom batch specs + cfg = specs.SimConfig({'foo': 0, 'bar': 1, 'baz': 2}) # create a SimConfig object, initializes it with a dictionary {'foo': 0} such that + assert cfg.foo == 0 # cfg.foo == 0 + assert cfg.bar == 1 # cfg.bar == 1 + assert cfg.baz == 2 # cfg.baz == 2 + cfg.update({'foo': 3}) # update the cfg object with any relevant mappings for this particular batch job + assert cfg.foo == 3 # cfg.foo == 3 + assert cfg.bar == 1 # cfg.bar remains unchanged + assert cfg.baz == 2 # cfg.baz remains unchanged + +``cfg.update()`` also supports the optional argument ``force_match``, which forces values in the update dictionary to match existing attributes within the ``SimConfig`` object. This setting is recommended to be set to ``True`` in order to prevent the unanticipated creation of new attributes within the ``SimConfig`` object at runtime ... + +.. code-block:: python + + from netpyne.batchtools import specs # import the custom batch specs + cfg = specs.SimConfig({'type': 0}) # create a SimConfig object, initializes it with a dictionary {'type': 0} such that + assert cfg.type == 0 # cfg.type == 0 + try: + cfg.update({'typo': 1}, force_match=True) # cfg.typo is not defined, so this line will raise an AttributeError + except Exception as e: + print(e) + cfg.update({'typo': 1}) # without force_match, the typo attribute cfg.typo is created and set to 1 + assert cfg.type == 0 # cfg.type remains unchanged due to a typo in the attribute name 'type' -> 'typo' + assert cfg.typo == 1 # instead, cfg.typo is created and set to the value 1 + +Both the initialization of the ``cfg`` object with ``specs.SimConfig()`` and the subsequent call to ``cfg.update()`` handle nested containers... + +..
code-block:: python + + from netpyne.batchtools import specs + cfg = specs.SimConfig({'foo': {'val0': 0, 'arr0': [0, 1, 2]}}) + assert cfg.foo['val0'] == 0 + assert cfg.foo['arr0'][0] == 0 + cfg.update({'foo': {'val0': 10, # update cfg.foo['val0'] to 10 + 'arr0': {0: 20, # update cfg.arr0[0] to 20 + 1: 30}}}) # update cfg.arr0[1] to 30 + assert cfg.foo['val0'] == 10 + assert cfg.foo['arr0'][0] == 20 + assert cfg.foo['arr0'][1] == 30 + assert cfg.foo['arr0'][2] == 2 # cfg.arr0[2] remains unchanged + +After updating the ``cfg`` object with the supplied dictionary, further updates will be made as appropriate by the calling ``batch`` process's search parameters... + +5. Communicating results to the ``dispatcher`` with the ``comm`` object +----------------------------------------------------------------------- Prior batched simulations relied on ``.pkl`` files to communicate data. The ``netpyne.batch`` subpackage uses a specific ``comm`` object to send custom data back The ``comm`` object determines the method of communication based on the batch job submission type. @@ -2645,15 +2717,18 @@ In terms of the simulation, the following functions are available to the user: * **comm.initialize()**: establishes a connection with the batch ``dispatcher`` for sending data * **comm.send()**: sends ```` to the batch ``dispatcher`` + * for ``search`` jobs, it is important to match the data sent with the metric specified in the search function * **comm.close()**: closes and cleans up the connection with the batch ``dispatcher`` -5. Specifying a batch job - +6. Specifying a batch job +------------------------- Batch job handling is implemented with methods from ``netpyne.batchtools.search`` -**search**:: +**search** + +.. code-block:: python def search(job_type: str, # the submission engine to run a single simulation (e.g. 'sge', 'sh') comm_type: str, # the method of communication between host dispatcher and the simulation (e.g.
'socket', 'filesystem') @@ -2707,7 +2782,9 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **params**: a dictionary of config values to perform the search over. The keys of the dictionary should match the keys of the config object to be updated. Lists or numpy generators >2 values will force a grid search over the values; otherwise, a list of two values will create a uniform distribution sample space. - **usage 1**: updating a constant value specified in the ``SimConfig`` object :: + **usage 1**: updating a constant value specified in the ``SimConfig`` object + +.. code-block:: python # take a config object with the following parameter ``foo`` cfg = specs.SimConfig() @@ -2726,7 +2803,9 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t 'foo': range(10) } - **usage 2**: updating a nested object in the ``SimConfig`` object:: + **usage 2**: updating a nested object in the ``SimConfig`` object + +.. code-block:: python # to update a nested object, the package uses the `.` operator to specify reflection into the object. # take a config object with the following parameter object ``foo`` @@ -2751,9 +2830,35 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t # cfg.foo = {'bar': {'baz': 0}} # params = {'foo.bar.baz': range(10)} + **usage 3**: updating a list object in the ``SimConfig`` object + +.. code-block:: python + + # to update a nested object, the package uses the `.` operator to specify reflection into the object. + # take a config object with the following + cfg = specs.SimConfig() + cfg.foo = [0, 1, 4, 9, 16] + cfg.update() + + # specify a search space for ``foo[0]`` with `foo.0` such that a simulation will run: + # cfg.foo[0] = 0 + # cfg.foo[0] = 1 + # cfg.foo[0] = 2 + # ... + # cfg.foo[0] = 9 + + # using: + params = { + 'foo.0': range(10) + } + + # this reflection works with nested objects as well... 
+ * **algorithm** : the search algorithm (supported within ``ray.tune``) - **Supported algorithms**:: + **Supported algorithms** + +.. code-block:: python * "variant_generator": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) * "random": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) @@ -2771,7 +2876,7 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **output_path**: the directory for storing generated files, can be a relative or absolute path -* **checkpoint_path**: the directory for storing checkpoint files in case the search needs to be restored, can be a relative or absolute path +* **checkpoint_path**: the directory for storing checkpoint files (maintained by ``ray.tune``)in case the search needs to be restored, can be a relative or absolute path * **max_concurrent**: the number of concurrent trials to run at one time, it is recommended to keep in mind the resource usage of each trial to avoid overscheduling @@ -2785,11 +2890,87 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **algorithm_config**: additional configuration for the search algorithm (see the `optuna docs `_) -6. Performing parameter optimization searches (CA3 example) +7. Batch searches on the Rosenbrock function (some simple examples) +------------------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ contains multiple methods of performing automatic parameter search of a +2 dimensional Rosenbrock function. These examples are used to quickly demonstrate some of the functionality of batch communications rather than the full process of running parameter searches on a detailed +NEURON simulation (see 7. 
Performing parameter optimization searches (CA3 example)) and therefore only contain the a `batch.py` file containing the script detailing the parameter space and search method, and a +`rosenbrock.py` file containing the function to explore, and the appropriate declarations and calls for batch automation and communication (rather than the traditional `cfg.py`, `netParams.py`, and `init.py` files). + +1. `basic_rosenbrock `_ -The ``examples`` directory `here `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. +This demonstrates a basic grid search of the Rosenbrock function using the new ``batchtools``, where the search space is defined as the cartesian product of ``params['x0']`` and ``params['x1']`` -In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined:: +.. code-block:: python + + # from batch.py + params = {'x0': [0, 3], + 'x1': [0, 3], + } + +that is, with the values ``cfg.x0``, ``cfg.x1`` iterating over: ``[(0, 0), (0, 3), (3, 0), (3, 3)]`` list + +2. `coupled_rosenbrock `_ + +This demonstrates a basic paired grid search, where ``x0`` is ``[0, 1, 2]`` and x1[n] is ``x0[n]**2`` + +.. code-block:: python + + # from batch.py + x0 = numpy.arange(0, 3) + x1 = x0**2 + + x0_x1 = [*zip(x0, x1)] + params = {'x0_x1': x0_x1 + } + +the ``x0`` and ``x1`` values are paired together to create a search space ``x0_x1`` iterating over: ``[(0, 0), (1, 1), (2, 4)]`` list + +then, in the ``rosenbrock.py`` file, a list of two values ``cfg.x0_x1`` is created to capture the ``x0_x1`` values, which is then unpacked into individual ``x0`` and ``x1`` values + +.. code-block:: python + + # from rosenbrock.py + cfg.x0_x1 = [1, 1] + + cfg.update_cfg() + + # -------------- unpacking x0_x1 list -------------- # + x0, x1 = cfg.x0_x1 + +then the Rosenbrock function is evaluated with the unpacked ``x0`` and ``x1`` + +3. 
`random_rosenbrock `_ + +This demonstrates a grid search over a nested object, where ``xn`` is a list of 2 values which are independently modified to search the cartesian product of ``[0, 1, 2, 3, 4]`` and ``[0, 1, 2, 3, 4]`` + +.. code-block:: python + + # from batch.py + params = {'xn.0': numpy.arange(0, 5), + 'xn.1': numpy.arange(0, 5) + } + +By using ``xn.0`` and ``xn.1`` we can reference the 0th and 1st elements of the list, which is created and modified in rosenbrock.py + +.. code-block:: python + + # from rosenbrock.py + cfg.xn = [1, 1] + + cfg.update_cfg() + + # ---------------- unpacking x list ---------------- # + x0, x1 = cfg.xn + + +8. Performing parameter optimization searches (CA3 example) +----------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. + +In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined + +.. code-block:: python # from optuna_search.py params = {'nmda.PYR->BC' : [1e-3, 1.8e-3], @@ -2797,7 +2978,9 @@ In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic w 'gaba.BC->PYR' : [0.4e-3, 1.0e-3], } -in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined:: +in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined + +.. code-block:: python # from grid_search.py params = {'nmda.PYR->BC' : numpy.linspace(1e-3, 1.8e-3, 3), @@ -2807,7 +2990,9 @@ in both ``optuna_search.py``, defining the upper and lower bounds of the search which defines ``3x3x3`` specific values to search over -Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. 
This value is generated and ``sent`` by the ``init.py`` simulation::
+Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation
+
+.. code-block:: python

     # from init.py
     results['PYR_loss'] = (results['PYR'] - 3.33875)**2
@@ -2823,4 +3008,36 @@ Note that the ``metric`` specifies a specific ``string`` (``loss``) to report an

 The ``out_json`` output contains a dictionary which includes the ``loss`` metric (calculated as the MSE between observed and expected values)

-In a multi-objective optimization, the relevant ``PYR_loss``, ``BC_loss``, and ``OLM_loss`` components are additionally included (see ``mo_optuna_search.py``)
\ No newline at end of file
+In a multi-objective optimization, the relevant ``PYR_loss``, ``BC_loss``, and ``OLM_loss`` components are additionally included (see ``mo_optuna_search.py``)
+
+9. Multiprocessing and parallelization
+--------------------------------------
+When using ``mpiexec`` to run simulations, it is important to only have one thread handle communications with the ``dispatcher`` host. To do this, encapsulate calls to ``comm.send()`` and ``comm.close()`` within
+a conditional block which checks that the proper thread (set as the 0th thread) executes communication calls as follows...
+
+.. code-block:: python
+
+    out_json = json.dumps({**inputs, **results}) # prepare message to send to host...
+    if comm.is_host(): # only single thread enters this execution block...
+        comm.send(out_json)
+        comm.close()
+
+10. Parameter Importance Evaluation Using fANOVA
+------------------------------------------------
+A new feature in the batchtools beta release is the ability to evaluate parameter importance using a functional ANOVA inspired algorithm via the ``Optuna`` and ``scikit-learn`` libraries.
+(See `the original Hutter paper `_ and its `citation `_) + +Currently, only unpaired single parameter importance to a single metric score is supported through the ``NetPyNE.batchtools.analysis`` ``Analyzer`` object, with an example of its usage +`here `_: + +To run the example, generate an output ``grid.csv`` using ``batch.py``, then loading that ``grid.csv`` into the ``Analyzer`` object. Then, using ``run_analysis`` will generate, per parameter, a single score indicative of the estimated ``importance`` of the parameter: that is, the estimated effect on the total variance of the model within the given bounds. + +.. code-block:: python + + # from analysis.py + from netpyne.batchtools.analysis import Analyzer + + analyzer = Analyzer(params = ['x.0', 'x.1', 'x.2', 'x.3'], metrics = ['fx']) # specify the parameter space and metrics of the batch function + analyzer.load_file('grid.csv') # load the grid file generated by the batch run + results = analyzer.run_analysis() # run fANOVA analysis and store the importance values in a results dictionary + diff --git a/netpyne/batch/utils.py b/netpyne/batch/utils.py index 2fcb79a4b..99bdbe759 100644 --- a/netpyne/batch/utils.py +++ b/netpyne/batch/utils.py @@ -29,12 +29,12 @@ def createFolder(folder): import os # If file path does not exist, it will create the file path (parent and sub-directories) - if not os.path.exists(folder): - try: - os.makedirs(folder) - except OSError as e: - print('%s: OSError: %s,' % (os.path.abspath(__file__), e)) - raise SystemExit('Could not create %s' % (folder)) + + try: + os.makedirs(folder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) # ------------------------------------------------------------------------------- diff --git a/netpyne/batchtools/__init__.py b/netpyne/batchtools/__init__.py new file mode 100644 index 000000000..5a373d286 --- /dev/null +++ 
b/netpyne/batchtools/__init__.py @@ -0,0 +1,27 @@ +from netpyne.batchtools.runners import NetpyneRunner +from batchtk.runtk import dispatchers +from netpyne.batchtools import submits +from batchtk import runtk +from netpyne.batchtools.analysis import Analyzer + +specs = NetpyneRunner() + +from netpyne.batchtools.comm import Comm + +comm = Comm() + +dispatchers = dispatchers +submits = submits +runtk = runtk + + +""" +def analyze_from_file(filename): + analyzer = Fanova() + analyzer.load_file(filename) + analyzer.run_analysis( +""" + +#from ray import tune as space.comm +#list and lb ub + diff --git a/netpyne/batchtools/analysis.py b/netpyne/batchtools/analysis.py new file mode 100644 index 000000000..561e339ab --- /dev/null +++ b/netpyne/batchtools/analysis.py @@ -0,0 +1,55 @@ +import pandas +from collections import namedtuple +import numpy + +from optuna.importance._fanova._fanova import _Fanova + + +class Fanova(object): + def __init__(self, n_trees: int = 64, max_depth: int = 64, seed: int | None = None) -> None: + self._evaluator = _Fanova( + n_trees=n_trees, + max_depth=max_depth, + min_samples_split=2, + min_samples_leaf=1, + seed=seed, + ) + + def evaluate(self, X: pandas.DataFrame, y: pandas.DataFrame) -> dict: + assert X.shape[0] == y.shape[0] # all rows must be present + assert y.shape[1] == 1 # only evaluation for single metric supported + + evaluator = self._evaluator + #mins, maxs = X.min().values, X.max().values #in case bound matching is necessary. 
+ search_spaces = numpy.array([X.min().values, X.max().values]).T # bounds + column_to_encoded_columns = [numpy.atleast_1d(i) for i in range(X.shape[1])] # encoding (no 1 hot/categorical) + evaluator.fit(X.values, y.values.ravel(), search_spaces, column_to_encoded_columns) + importances = numpy.array( + [evaluator.get_importance(i)[0] for i in range(X.shape[1])] + ) + return {col: imp for col, imp in zip(X.columns, importances)} + + +class Analyzer(object): + def __init__(self, + params: list, # list of parameters + metrics: list, # list of metrics + evaluator = Fanova()) -> None: + self.params = params + self.metrics = metrics + self.data = None + self.evaluator = evaluator + + def load_file(self, + filename: str # filename (.csv) containing the completed batchtools trials + ) -> None: + data = pandas.read_csv(filename) + param_space = data[["config/{}".format(param) for param in self.params]] + param_space = param_space.rename(columns={'config/{}'.format(param): param for param in self.params}) + results = data[self.metrics] + self.data = namedtuple('data', ['param_space', 'results'])(param_space, results) + + def run_analysis(self) -> dict: + return self.evaluator.evaluate(self.data.param_space, self.data.results) + + diff --git a/netpyne/batchtools/comm.py b/netpyne/batchtools/comm.py new file mode 100644 index 000000000..1a8a12763 --- /dev/null +++ b/netpyne/batchtools/comm.py @@ -0,0 +1,48 @@ +from netpyne.batchtools import specs +from batchtk.runtk.runners import get_class +from batchtk import runtk +from neuron import h +import warnings +HOST = 0 # for the purposes of send and receive with mpi. 
+ +class Comm(object): + def __init__(self, runner = specs): + self.runner = runner + h.nrnmpi_init() + self.pc = h.ParallelContext() + self.rank = self.pc.id() + self.connected = False + + def initialize(self): + if self.is_host(): + try: + self.runner.connect() + self.connected = True + except Exception as e: + print("Failed to connect to the Dispatch Server, failover to Local mode. See: {}".format(e)) + self.runner._set_inheritance('file') #TODO or could change the inheritance of the runner ... + self.runner.env[runtk.MSGOUT] = "{}/{}.out".format(self.runner.cfg.saveFolder, self.runner.cfg.simLabel) + + def set_runner(self, runner_type): + self.runner = get_class(runner_type)() + def is_host(self): + return self.rank == HOST + def send(self, data): + if self.is_host(): + if self.connected: + self.runner.send(data) + else: + self.runner.write(data) + + def recv(self): #TODO to be tested, broadcast to all workers? + if self.is_host() and self.connected: + data = self.runner.recv() + else: + data = None + #data = self.is_host() and self.runner.recv() + #probably don't put a blocking statement in a boolean evaluation... 
+ self.pc.barrier() + return self.pc.py_broadcast(data, HOST) + + def close(self): + self.runner.close() diff --git a/netpyne/batchtools/docs/batchtools.ipynb b/netpyne/batchtools/docs/batchtools.ipynb new file mode 100644 index 000000000..22fe3489f --- /dev/null +++ b/netpyne/batchtools/docs/batchtools.ipynb @@ -0,0 +1,314 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "Jupyter Tutorial: The NetPyNE batchtools subpackage\n", + "How to use the `specs` and `comm` to communicate with the `batchtools` `dispatcher`\n" + ], + "metadata": { + "collapsed": false + }, + "id": "89ec6ca2392a9a0d" + }, + { + "cell_type": "markdown", + "source": [ + "For each individual `sim`, communication with the `batchtools` `dispatcher` occurs through the `specs` and `comm` objects" + ], + "metadata": { + "collapsed": false + }, + "id": "be50f40d8e61a944" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from netpyne.batchtools import specs, comm" + ], + "metadata": { + "collapsed": false + }, + "id": "6f321aedb7faf945", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "the `specs` object is an instantiation of a custom class extending the `batchtk` `Runner` ..." + ], + "metadata": { + "collapsed": false + }, + "id": "5f2f08f0b5e582c3" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "help(type(specs))" + ], + "metadata": { + "collapsed": false + }, + "id": "29fa261236494bc3", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "From this `specs` object, we can similarly call `specs.NetParams` and `specs.SimConfig` to create the NetPyNE objects..." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "64ead24451bbad4a" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "help(specs.NetParams)\n", + "help(specs.SimConfig)" + ], + "metadata": { + "collapsed": false + }, + "id": "43d263d080800019", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "The `batchtools` job submission tool uses `environmental variables` to pass values to our `config` object created by `specs.SimConfig`, these `environmental variables` are captured during the `specs` `object creation` which occurs during the batchtools `import` (from the `batchtools` `__init__.py`:\n", + "```\n", + "from netpyne.batchtools.runners import NetpyneRunner\n", + "specs = NetpyneRunner()\n", + "```" + ], + "metadata": { + "collapsed": false + }, + "id": "710cc6084bd7af02" + }, + { + "cell_type": "markdown", + "source": [ + "Let's `export` some `environmental variables` to pass values to our `config` object. When this is handled by the `batchtools` `subpackage`, this occurs automatically..." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "52704684f5e80f3c" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "%env STRRUNTK0 =foo.bar=baz\n", + "%env FLOATRUNTK1 =float_val=7.7\n", + "from netpyne.batchtools import NetpyneRunner\n", + "specs = NetpyneRunner()" + ], + "metadata": { + "collapsed": false + }, + "id": "50de117ff7f43aa6", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "One way of retrieving these values is by calling `specs.get_mappings()`" + ], + "metadata": { + "collapsed": false + }, + "id": "fac14e517044b980" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "print(specs.get_mappings())" + ], + "metadata": { + "collapsed": false + }, + "id": "257fad390f4abce", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Now, let's create our `config` object using the `specs.SimConfig()` constructor\n", + "This `config` object will hold a `dictionary` such that the initial values `foo['bar']` = `not_baz` and a `float_val` = `3.3`" + ], + "metadata": { + "collapsed": false + }, + "id": "92d41061bb828744" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "cfg = specs.SimConfig()\n", + "cfg.foo = {'bar': 'not_baz', 'qux': 'quux'}\n", + "cfg.float_val = 3.3\n", + "print(\"cfg.foo['bar'] = {}\".format(cfg.foo['bar']))\n", + "print(\"cfg.float_val = {}\".format(cfg.float_val))" + ], + "metadata": { + "collapsed": false + }, + "id": "ca121d6ab30c3e7b", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Finally, calling the `cfg.update_cfg()` method will overwrite the original values with our environment values, (`baz` and `7.7`)...\n", + "\n", + "in NetPyNE, this was originally handled with the:\n", + "```\n", + "try:\n", + " from __main__ import cfg\n", + "except:\n", + " from cfg import cfg\n", + "```\n", + "API idiom in the `netParams.py` file...\n", + " \n", + "as well as the \n", + "```\n", + "cfg, netParams = 
sim.readCmdLineArgs(simConfigDefault='src/cfg.py', netParamsDefault='src/netParams.py')\n", + "```\n", + "API idiom in the `init.py` file...\n", + "\n", + "using the `batchtools` subpackage, we can treat the `cfg` as an object and pass it between scripts via `import` statements...\n", + "in `netParams.py`...\n", + "```\n", + "from cfg import cfg\n", + "cfg.update()\n", + "```\n", + "in `init.py`...\n", + "```\n", + "from netParams import cfg, netParams\n", + "sim.createSimulateAnalyze(simConfig=cfg, netParams=netParams)\n", + "```" + ], + "metadata": { + "collapsed": false + }, + "id": "6ea43f729d0685d4" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "print(\"prior to cfg.update()\")\n", + "print(\"cfg.foo['bar'] = {}\".format(cfg.foo['bar']))\n", + "print(\"cfg.float_val = {}\".format(cfg.float_val))\n", + "print()\n", + "cfg.update() # call update_cfg to update values in the cfg object with values assigned by batch\n", + "print(\"after the cfg.update()\")\n", + "print(\"cfg.foo['bar'] = {}\".format(cfg.foo['bar']))\n", + "print(\"cfg.float_val = {}\".format(cfg.float_val))" + ], + "metadata": { + "collapsed": false + }, + "id": "a9426b6e6594961", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Finally, the `comm object` is used to report to the monitoring `dispatcher object`\n", + "the means of communication is dependent on which `dispatcher object` is instantiated, and communicated through environmental variables\n", + "in this case, since there is no `dispatcher object` the `comm` methods will simply perform `pass operations`" + ], + "metadata": { + "collapsed": false + }, + "id": "65bbb0ef2c76295a" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "comm.initialize() # initializes comm object, establishing channel to communicate with the host dispatcher object" + ], + "metadata": { + "collapsed": false + }, + "id": "e9141d91d6e02aa3", + "execution_count": null + }, + { + "cell_type": "code", + 
"outputs": [], + "source": [ + "print(comm.is_host()) # returns a boolean IF the calling process is the 0th ranked parallelcontext, similar to sim.pc.rank == 0" + ], + "metadata": { + "collapsed": false + }, + "id": "5ed6a524bd8a3e0b", + "execution_count": null + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "comm.send('message') # sends 'message' to the `dispatcher object`" + ], + "metadata": { + "collapsed": false + }, + "id": "1966edbf32649352", + "execution_count": null + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "comm.close() #finalizes communication, closes any resources used to communicate with the `dispatcher object`" + ], + "metadata": { + "collapsed": false + }, + "id": "34f021af4127363c" + }, + { + "cell_type": "markdown", + "source": [], + "metadata": { + "collapsed": false + }, + "id": "648746fff96b8a72" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/netpyne/batchtools/docs/batchtools.rst b/netpyne/batchtools/docs/batchtools.rst new file mode 100644 index 000000000..c21bf4da9 --- /dev/null +++ b/netpyne/batchtools/docs/batchtools.rst @@ -0,0 +1,435 @@ +Running a Batch Job +=================== + +The NetPyNE batchtools subpackage provides a method of automating job submission and reporting + +A diagram of the object interfaces... + +:: + + batch<-->\ /---> configuration_0 >---\ + \ / specs---\ + \<--->dispatcher_0 sim_0 + \ \ comm ---/ + \ \---< results_0 <---/ + \ + \ /---> configuration_1 >---\ + \ / specs---\ + \<--->dispatcher_1 sim_1 + \ \ comm ---/ + \ \---< results_1 <---/ + \ + \ + ... 
+ +While objects and interfaces can be handled directly, batchtools offers simple wrapper commands applicable to most use-cases, where +automatic parameter searches can be done by specifying a search space and algorithm through `netpyne.batchtools.search`, and +parameter to model translation and result communication is handled through `netpyne.batchtools.specs` and `netpyne.batchtools.comm` respectively. + +A diagram of the wrapper interactions... + +:: + + netpyne.batchtools.search.search( ) ----------------------------\ host + | | + | search( ) | + ============================================================================================== + | comm.initialize( ) + | comm.send( ) + | cfg = netpyne.batchtools.specs.SimConfig( ) comm.close( ) + | | ^ ^ + v v | | + cfg.update_cfg() ----------------------------------------/ | + | + send( ) netpyne.batchtools.comm( ) + simulation + +1. Setting up batchtools +------------------------ +Beyond the necessary dependency installations for NetPyNE and NEURON, several additional `pip` installations are required. + +The NetPyNE installation should be handled as a development installation of the repository branch `batch`:: + + git clone https://github.com/Neurosim-lab/netpyne.git + cd netpyne + git checkout batch + pip install -e . + +The batchtools installation either:: + + pip install -u batchtk + +or a development install (recommended):: + + git clone https://github.com/jchen6727/batchtk.git + cd batchtk + pip install -e . + +Ray is a dependency for batchtools, and should be installed with the following command:: + + pip install -u ray[default] + +2. Examples +----------- +Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `on the NetPyNE github `_. + +Examples of the underlying batchtk package can be in the ``examples`` directory `on the batchtk github `_. + +3. 
Retrieving batch configuration values through the ``specs`` object
+---------------------------------------------------------------------
+Each simulation is able to retrieve relevant configurations through the ``specs`` object, and communicate with
+the dispatcher through the ``comm`` object.
+
+importing the relevant objects::
+
+    from netpyne.batchtools import specs, comm
+    cfg = specs.SimConfig()     # create a SimConfig object, can be provided with a dictionary on initial call to set initial values
+    netParams = specs.NetParams() # create a netParams object
+
+``netpyne.batchtools.specs`` behaves similarly to ``netpyne.sim.specs`` except in the following cases:
+
+* ``netpyne.batchtools.specs`` automatically captures relevant configuration mappings created by the ``dispatcher`` upon initialization
+
+  * these mappings can be retrieved via ``specs.get_mappings()``
+
+* the SimConfig object created by ``netpyne.batchtools.specs.SimConfig()`` will update itself with relevant configuration mappings through the ``update()`` method::
+
+    from netpyne.batchtools import specs # import the custom batch specs
+    cfg = specs.SimConfig()              # create a SimConfig object
+    cfg.update()                         # update the cfg object with any relevant mappings for this particular batch job
+
+The ``update`` method will update the ``SimConfig`` object with the configuration mappings captured in ``specs`` (see: ``specs.get_mappings()``)
+
+This replaces the previous idiom for updating the SimConfig object with mappings from the batched job submission::
+
+    try:
+        from __main__ import cfg  # import SimConfig object with params from parent module
+    except:
+        from cfg import cfg  # if no simConfig in parent module, import directly from tut8_cfg module
+
+
+
+
+4. Communicating results to the ``dispatcher`` with the ``comm`` object
+-----------------------------------------------------------------------
+
+Prior batched simulations relied on ``.pkl`` files to communicate data.
The ``netpyne.batchtools`` subpackage uses a specific ``comm`` object to send custom data back to the ``dispatcher``.
+The ``comm`` object determines the method of communication based on the batch job submission type.
+
+In terms of the simulation, the following functions are available to the user:
+
+* **comm.initialize()**: establishes a connection with the batch ``dispatcher`` for sending data
+
+* **comm.send(<data>)**: sends ``<data>`` to the batch ``dispatcher``
+
+  * for ``search`` jobs, it is important to match the data sent with the metric specified in the search function
+
+* **comm.close()**: closes and cleans up the connection with the batch ``dispatcher``
+
+5. Specifying a batch job
+-------------------------
+Batch job handling is implemented with methods from ``netpyne.batchtools.search``
+
+**search**
+
+.. code-block:: python
+
+     def search(job_type: str, # the submission engine to run a single simulation (e.g. 'sge', 'sh')
+           comm_type: str, # the method of communication between host dispatcher and the simulation (e.g.
'socket', 'filesystem')
+           run_config: Dict, # batch configuration, (keyword: string pairs to customize the submit template)
+           params: Dict, # search space (dictionary of parameter keys: tune search spaces)
+           algorithm: Optional[str] = "variant_generator", # search algorithm to use, see SEARCH_ALG_IMPORT for available options
+           label: Optional[str] = 'search', # label for the search
+           output_path: Optional[str] = '../batch', # directory for storing generated files
+           checkpoint_path: Optional[str] = '../ray', # directory for storing checkpoint files
+           max_concurrent: Optional[int] = 1, # number of concurrent trials to run at one time
+           batch: Optional[bool] = True, # whether concurrent trials should run synchronously or asynchronously
+           num_samples: Optional[int] = 1, # number of trials to run
+           metric: Optional[str] = "loss", # metric to optimize (this should match some key: value pair in the returned data)
+           mode: Optional[str] = "min", # either 'min' or 'max' (whether to minimize or maximize the metric)
+           algorithm_config: Optional[dict] = None, # additional configuration for the search algorithm
+           ) -> tune.ResultGrid: # results of the search
+
+The basic search implemented with the ``search`` function uses ``ray.tune`` as the search algorithm backend, returning a ``tune.ResultGrid`` which can be used to evaluate the search space and results. It takes the following parameters:
+
+* **job_type**: either "``sge``" or "``sh``", specifying how the job should be submitted, "``sge``" will submit batch jobs through the Sun Grid Engine.
"``sh``" will submit bach jobs through the shell on a local machine +* **comm_type**: either "``socket``" or "``filesystem``", specifying how the job should communicate with the dispatcher +* **run_config**: a dictionary of keyword: string pairs to customize the submit template, the expected keyword: string pairs are dependent on the job_type:: + + ======= + sge + ======= + queue: the queue to submit the job to (#$ -q {queue}) + cores: the number of cores to request for the job (#$ -pe smp {cores}) + vmem: the amount of memory to request for the job (#$ -l h_vmem={vmem}) + realtime: the amount of time to request for the job (#$ -l h_rt={realtime}) + command: the command to run for the job + + example: + run_config = { + 'queue': 'cpu.q', # request job to be run on the 'cpu.q' queue + 'cores': 8, # request 8 cores for the job + 'vmem': '8G', # request 8GB of memory for the job + 'realtime': '24:00:00', # set timeout of the job to 24 hours + 'command': 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py' + } # set the command to be run to 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py' + + ======= + sh + ======= + command: the command to run for the job + + example: + run_config = { + 'command': 'mpiexec -n 8 nrniv -python -mpi init.py' + } # set the command to be run + +* **params**: a dictionary of config values to perform the search over. The keys of the dictionary should match the keys of the config object to be updated. Lists or numpy generators >2 values will force a grid search over the values; otherwise, a list of two values will create a uniform distribution sample space. + + **usage 1**: updating a constant value specified in the ``SimConfig`` object + +.. code-block:: python + + # take a config object with the following parameter ``foo`` + cfg = specs.SimConfig() + cfg.foo = 0 + cfg.update() + + # specify a search space for ``foo`` such that a simulation will run with: + # cfg.foo = 0 + # cfg.foo = 1 + # cfg.foo = 2 + # ... 
+ # cfg.foo = 9 + + # using: + params = { + 'foo': range(10) + } + + **usage 2**: updating a nested object in the ``SimConfig`` object + +.. code-block:: python + + # to update a nested object, the package uses the `.` operator to specify reflection into the object. + # take a config object with the following parameter object ``foo`` + cfg = specs.SimConfig() + cfg.foo = {'bar': 0, 'baz': 0} + cfg.update() + + # specify a search space for ``foo['bar']`` with `foo.bar` such that a simulation will run: + # cfg.foo['bar'] = 0 + # cfg.foo['bar'] = 1 + # cfg.foo['bar'] = 2 + # ... + # cfg.foo['bar'] = 9 + + # using: + params = { + 'foo.bar': range(10) + } + + # this reflection works with nested objects as well... + # i.e. + # cfg.foo = {'bar': {'baz': 0}} + # params = {'foo.bar.baz': range(10)} + + **usage 3**: updating a list object in the ``SimConfig`` object + +.. code-block:: python + + # to update a nested object, the package uses the `.` operator to specify reflection into the object. + # take a config object with the following + cfg = specs.SimConfig() + cfg.foo = [0, 1, 4, 9, 16] + cfg.update() + + # specify a search space for ``foo[0]`` with `foo.0` such that a simulation will run: + # cfg.foo[0] = 0 + # cfg.foo[0] = 1 + # cfg.foo[0] = 2 + # ... + # cfg.foo[0] = 9 + + # using: + params = { + 'foo.0': range(10) + } + + # this reflection works with nested objects as well... + +* **algorithm** : the search algorithm (supported within ``ray.tune``) + + **Supported algorithms** + +.. 
code-block:: python + + * "variant_generator": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "random": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "axe": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "bayesopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "hyperopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "bohb": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "nevergrad": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "optuna": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "hebo": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "sigopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + * "zoopt": optimization algorithm (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) + +* **label**: a label for the search, used for output file naming + +* **output_path**: the directory for storing generated files, can be a relative or absolute path + +* **checkpoint_path**: the directory for storing checkpoint files in case the search needs to be restored, can be a relative or absolute path + +* **max_concurrent**: the number of concurrent trials to run at one time, it is recommended to keep in mind the resource usage of each trial to avoid overscheduling + +* **batch**: whether concurrent trials should run synchronously or asynchronously + +* **num_samples**: the number of trials to run, for any grid search, each value in the grid will be sampled ``num_samples`` times. 
+ +* **metric**: the metric to optimize (this should match some key: value pair in the returned data) + +* **mode**: either 'min' or 'max' (whether to minimize or maximize the metric) + +* **algorithm_config**: additional configuration for the search algorithm (see the `optuna docs `_) + +6. Batch searches on the Rosenbrock function (some simple examples) +------------------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ contains multiple methods of performing automatic parameter search of a +2 dimensional Rosenbrock function. These examples are used to quickly demonstrate some of the functionality of batch communications rather than the full process of running parameter searches on a detailed +NEURON simulation (see 7. Performing parameter optimization searches (CA3 example)) and therefore only contain the a `batch.py` file containing the script detailing the parameter space and search method, and a +`rosenbrock.py` file containing the function to explore, and the appropriate declarations and calls for batch automation and communication (rather than the traditional `cfg.py`, `netParams.py`, and `init.py` files). + +1. `basic_rosenbrock `_ + +This demonstrates a basic grid search of the Rosenbrock function using the new ``batchtools``, where the search space is defined as the cartesian product of ``params['x0']`` and ``params['x1']`` + +.. code-block:: python + + # from batch.py + params = {'x0': [0, 3], + 'x1': [0, 3], + } + +that is, with the values ``cfg.x0``, ``cfg.x1`` iterating over: ``[(0, 0), (0, 3), (3, 0), (3, 3)]`` list + +2. `coupled_rosenbrock `_ + +This demonstrates a basic paired grid search, where ``x0`` is ``[0, 1, 2]`` and x1[n] is ``x0[n]**2`` + +.. 
code-block:: python + + # from batch.py + x0 = numpy.arange(0, 3) + x1 = x0**2 + + x0_x1 = [*zip(x0, x1)] + params = {'x0_x1': x0_x1 + } + +the ``x0`` and ``x1`` values are paired together to create a search space ``x0_x1`` iterating over: ``[(0, 0), (1, 1), (2, 4)]`` list + +then, in the ``rosenbrock.py`` file, a list of two values ``cfg.x0_x1`` is created to capture the ``x0_x1`` values, which is then unpacked into individual ``x0`` and ``x1`` values + +.. code-block:: python + + # from rosenbrock.py + cfg.x0_x1 = [1, 1] + + cfg.update_cfg() + + # -------------- unpacking x0_x1 list -------------- # + x0, x1 = cfg.x0_x1 + +then the Rosenbrock function is evaluated with the unpacked ``x0`` and ``x1`` + +3. `random_rosenbrock `_ + +This demonstrates a grid search over a nested object, where ``xn`` is a list of 2 values which are independently modified to search the cartesian product of ``[0, 1, 2, 3, 4]`` and ``[0, 1, 2, 3, 4]`` + +.. code-block:: python + + # from batch.py + params = {'xn.0': numpy.arange(0, 5), + 'xn.1': numpy.arange(0, 5) + } + +By using ``xn.0`` and ``xn.1`` we can reference the 0th and 1st elements of the list, which is created and modified in rosenbrock.py + +.. code-block:: python + + # from rosenbrock.py + cfg.xn = [1, 1] + + cfg.update_cfg() + + # ---------------- unpacking x list ---------------- # + x0, x1 = cfg.xn + + +7. Performing parameter optimization searches (CA3 example) +----------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. + +In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined + +.. 
code-block:: python + + # from optuna_search.py + params = {'nmda.PYR->BC' : [1e-3, 1.8e-3], + 'ampa.PYR->BC' : [0.2e-3, 0.5e-3], + 'gaba.BC->PYR' : [0.4e-3, 1.0e-3], + } + +in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined + +.. code-block:: python + + # from grid_search.py + params = {'nmda.PYR->BC' : numpy.linspace(1e-3, 1.8e-3, 3), + 'ampa.PYR->BC' : numpy.linspace(0.2e-3, 0.5e-3, 3), + 'gaba.BC->PYR' : numpy.linspace(0.4e-3, 1.0e-3, 3), + } + +which defines ``3x3x3`` specific values to search over + +Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation + +.. code-block:: python + + # from init.py + results['PYR_loss'] = (results['PYR'] - 3.33875)**2 + results['BC_loss'] = (results['BC'] - 19.725 )**2 + results['OLM_loss'] = (results['OLM'] - 3.470 )**2 + results['loss'] = (results['PYR_loss'] + results['BC_loss'] + results['OLM_loss']) / 3 + out_json = json.dumps({**inputs, **results}) + + print(out_json) + #TODO put all of this in a single function. + comm.send(out_json) + comm.close() + +The ``out_json`` output contains a dictionary which includes the ``loss`` metric (calculated as the MSE between observed and expected values) + +In a multi-objective optimization, the relevant ``PYR_loss``, ``BC_loss``, and ``OLM_loss`` components are additionally included (see ``mo_optuna_search.py``) + +8. Parameter Importance Evaluation Using fANOVA +----------------------------------------------- +A new feature in the batchtools beta release is the ability to evaluate parameter importance using a functional ANOVA inspired algorithm via the `Optuna` and `scikit-learn` libraries. 
+(See `the original Hutter paper `_ and its `citation `_) + +Currently, only unpaired single parameter importance to a single metric score is supported through the `NetPyNE.batchtools.analysis` `Analyzer` object, with an example of its usage +`here `_: + +In its current iteration, demonstrating the example requires generating an output `grid.csv` using `batch.py`, then loading that `grid.csv` into the `Analyzer` object. Then, using `run_analysis` will generate, per parameter, a single score indicative of the estimated `importance` of the parameter: that is, the estimated effect on the total variance of the model within the given bounds. + +.. code-block:: python + + # from analysis.py + from netpyne.batchtools.analysis import Analyzer + + analyzer = Analyzer(params = ['x.0', 'x.1', 'x.2', 'x.3'], metrics = ['fx']) # specify the parameter space and metrics of the batch function + analyzer.load_file('grid.csv') # load the grid file generated by the batch run + results = analyzer.run_analysis() # run fANOVA analysis and store the importance values in a results dictionary + diff --git a/netpyne/batchtools/evol.py b/netpyne/batchtools/evol.py new file mode 100644 index 000000000..a30d29982 --- /dev/null +++ b/netpyne/batchtools/evol.py @@ -0,0 +1,6 @@ +import numpy +from cmaes import CMA + +from netpyne.batchtools import specs, comm + +# ---- Rosenbrock Function & Constant Definition ---- # diff --git a/netpyne/batchtools/examples/CA3/README.md b/netpyne/batchtools/examples/CA3/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/netpyne/batchtools/examples/CA3/cfg.py b/netpyne/batchtools/examples/CA3/cfg.py new file mode 100644 index 000000000..8780792fc --- /dev/null +++ b/netpyne/batchtools/examples/CA3/cfg.py @@ -0,0 +1,48 @@ +from netpyne.batchtools import specs + +### config ### + +cfg = specs.SimConfig() + +cfg.duration = 1000 +cfg.dt = 0.1 +cfg.hparams = {'v_init': -65.0} +cfg.verbose = False +cfg.recordTraces = {} # don't save this 
+
+cfg.recordStim = False
+cfg.recordStep = 0.1 # Step size in ms to save data (eg. V traces, LFP, etc)
+cfg.filename = '00' # Set file output name
+cfg.savePickle = False # Save params, network and sim output to pickle file
+cfg.saveDat = False
+cfg.saveJson = True
+cfg.printRunTime = 0.1
+cfg.recordLFP = None # don't save this
+cfg.simLabel = 'ca3'
+cfg.saveFolder = '.'
+
+
+cfg.analysis['plotRaster'] = {'saveFig': True} # raster ok
+cfg.analysis['plotTraces'] = { } # don't save this
+cfg.analysis['plotLFPTimeSeries'] = { } # don't save this
+
+cfg.cache_efficient = True # better with MPI?
+""" remove all of the unnecessary data """
+cfg.saveCellSecs = False
+cfg.saveCellConns = False
+
+cfg.nmda={#NMDA search space
+ "PYR->BC" : 1.38e-3,
+ "PYR->OLM": 0.7e-3,
+ "PYR->PYR": 0.004e-3,
+}
+cfg.ampa={#AMPA search space
+ "PYR->BC" : 0.36e-3,
+ "PYR->OLM": 0.36e-3,
+ "PYR->PYR": 0.02e-3,
+}
+
+cfg.gaba = {#GABA search space
+ "BC->BC" : 4.5e-3,
+ "BC->PYR" : 0.72e-3,
+ "OLM->PYR": 72e-3,
+}
diff --git a/netpyne/batchtools/examples/CA3/grid_search.py b/netpyne/batchtools/examples/CA3/grid_search.py
new file mode 100644
index 000000000..3ad5dedd2
--- /dev/null
+++ b/netpyne/batchtools/examples/CA3/grid_search.py
@@ -0,0 +1,41 @@
+from netpyne.batchtools.search import search
+import numpy
+
+params = {'nmda.PYR->BC' : numpy.linspace(1e-3, 1.8e-3, 3),
+ #'nmda.PYR->OLM': numpy.linspace(0.4e-3, 1.0e-3, 3),
+ #'nmda.PYR->PYR': numpy.linspace(0.001e-3, 0.007e-3, 3),
+ 'ampa.PYR->BC' : numpy.linspace(0.2e-3, 0.5e-3, 3),
+ #'ampa.PYR->OLM': numpy.linspace(0.2e-3, 0.5e-3, 3),
+ #'ampa.PYR->PYR': numpy.linspace(0.01e-3, 0.03e-3, 3),
+ #'gaba.BC->BC' : numpy.linspace(1e-3, 7e-3, 3),
+ 'gaba.BC->PYR' : numpy.linspace(0.4e-3, 1.0e-3, 3),
+ #'gaba.OLM->PYR': numpy.linspace(40e-3, 100e-3, 3),
+ }
+
+# use batch_shell_config if running directly on the machine
+shell_config = {'command': 'mpiexec -np 4 nrniv -python -mpi init.py',}
+
+# use batch_sge_config if running on an SGE cluster
+sge_config = 
{ + 'queue': 'cpu.q', + 'cores': 5, + 'vmem': '4G', + 'realtime': '00:30:00', + 'command': 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py'} + + +run_config = sge_config + +search(job_type = 'sge', # or 'sh' + comm_type = 'socket', + label = 'grid', + + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = run_config, + num_samples = 1, + metric = 'loss', + mode = 'min', + algorithm = "variant_generator", + max_concurrent = 9) diff --git a/netpyne/batchtools/examples/CA3/init.py b/netpyne/batchtools/examples/CA3/init.py new file mode 100644 index 000000000..b84a47b9a --- /dev/null +++ b/netpyne/batchtools/examples/CA3/init.py @@ -0,0 +1,27 @@ +from netpyne.batchtools import specs, comm +from netpyne import sim +from netParams import netParams, cfg +import json + +comm.initialize() + +sim.createSimulate(netParams=netParams, simConfig=cfg) +print('completed simulation...') +#comm.pc.barrier() +#sim.gatherData() +if comm.is_host(): + netParams.save("{}/{}_params.json".format(cfg.saveFolder, cfg.simLabel)) + print('transmitting data...') + inputs = specs.get_mappings() + #print(json.dumps({**inputs})) + results = sim.analysis.popAvgRates(show=False) + + results['PYR_loss'] = (results['PYR'] - 3.33875)**2 + results['BC_loss'] = (results['BC'] - 19.725 )**2 + results['OLM_loss'] = (results['OLM'] - 3.470 )**2 + results['loss'] = (results['PYR_loss'] + results['BC_loss'] + results['OLM_loss']) / 3 + out_json = json.dumps({**inputs, **results}) + + print(out_json) + comm.send(out_json) + comm.close() diff --git a/netpyne/batchtools/examples/CA3/mo_optuna_search.py b/netpyne/batchtools/examples/CA3/mo_optuna_search.py new file mode 100644 index 000000000..25f478051 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mo_optuna_search.py @@ -0,0 +1,50 @@ +from netpyne.batchtools.search import ray_optuna_search +from netpyne.batchtools import dispatchers, submits +import batchtk + +from ray import tune + +params = 
{'nmda.PYR->BC' : tune.uniform(1e-3, 1.8e-3), + #'nmda.PYR->OLM': tune.uniform(0.4e-3, 1.0e-3), + #'nmda.PYR->PYR': tune.uniform(0.001e-3, 0.007e-3), + 'ampa.PYR->BC' : tune.uniform(0.2e-3, 0.5e-3), + #'ampa.PYR->OLM': tune.uniform(0.2e-3, 0.5e-3), + #'ampa.PYR->PYR': tune.uniform(0.01e-3, 0.03e-3), + #'gaba.BC->BC' : tune.uniform(1e-3, 7e-3), + 'gaba.BC->PYR' : tune.uniform(0.4e-3, 1.0e-3), + #'gaba.OLM->PYR': tune.uniform(40e-3, 100e-3), + } + +# use batch_shell_config if running directly on the machine +shell_config = {'command': 'mpiexec -np 4 nrniv -python -mpi init.py',} + +# use batch_sge_config if running on a +sge_config = { + 'queue': 'cpu.q', + 'cores': 5, + 'vmem': '4G', + 'realtime': '00:30:00', + 'command': 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py'} + +run_config = sge_config + +Dispatcher = dispatchers.INETDispatcher +Submit = submits.SGESubmitSOCK +metrics = ['PYR_loss', 'BC_loss', 'OLM_loss', 'loss'] + +ray_study = ray_optuna_search( + dispatcher_constructor = Dispatcher, + submit_constructor=Submit, + params = params, + run_config = run_config, + max_concurrent = 3, + output_path = '../mo_batch', + checkpoint_path = '../ray', + label = 'mo_search', + num_samples = 15, + metric = metrics, + mode = ['min', 'min', 'min', 'loss'],) + +results = { + metric: ray_study.results.get_best_result(metric, 'min') for metric in metrics +} diff --git a/netpyne/batchtools/examples/CA3/mod/CA1ih.mod b/netpyne/batchtools/examples/CA3/mod/CA1ih.mod new file mode 100644 index 000000000..93d435e30 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/CA1ih.mod @@ -0,0 +1,64 @@ +: $Id: CA1ih.mod,v 1.4 2010/12/13 21:35:47 samn Exp $ +TITLE Ih CA3 + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) +} + +NEURON { + SUFFIX hcurrent + NONSPECIFIC_CURRENT ih + RANGE g, e, v50, htau, hinf + RANGE gfactor +} + +PARAMETER { + celsius (degC) + g= 0.0001 (mho/cm2) + e= -30 (mV) + v50=-82 (mV) + gfactor = 1 +} + +STATE { + h +} + +ASSIGNED { + ih 
(mA/cm2) + hinf + htau (ms) + v (mV) +} + +PROCEDURE iassign () { ih=g*h*(v-e)*gfactor } + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { + rates(v) + h'= (hinf- h)/ htau +} + +INITIAL { + rates(v) + h = hinf + iassign() +} + +PROCEDURE rates(v (mV)) { + UNITSOFF + : HCN1 + :hinf = 1/(1+exp(0.151*(v-v50))) + :htau = exp((0.033*(v+75)))/(0.011*(1+exp(0.083*(v+75)))) + + : HCN2 + hinf = 1/(1+exp((v-v50)/10.5)) + htau = (1/(exp(-14.59-0.086*v)+exp(-1.87+0.0701*v))) + UNITSON +} + diff --git a/netpyne/batchtools/examples/CA3/mod/CA1ika.mod b/netpyne/batchtools/examples/CA3/mod/CA1ika.mod new file mode 100644 index 000000000..9e4fe6922 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/CA1ika.mod @@ -0,0 +1,85 @@ +: $Id: CA1ika.mod,v 1.2 2010/12/01 05:06:07 samn Exp $ +TITLE Ika CA1 + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) +} + +NEURON { + SUFFIX kacurrent + NONSPECIFIC_CURRENT ika, ikad + RANGE g, gd, e, ninf, ntau, ndinf, ndtau, linf, ltau +} + +PARAMETER { + celsius (degC) + g= 0.048 (mho/cm2) + gd= 0 (mho/cm2) + e= -90 (mV) +} + +STATE { + n + nd : distal + l +} + +ASSIGNED { + v (mV) + ika (mA/cm2) + ikad (mA/cm2) + ninf + ntau (ms) + ndinf + ndtau (ms) + linf + ltau (ms) +} + +PROCEDURE iassign () { + ika=g*n*l*(v-e) + ikad=gd*nd*l*(v-e) +} + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { + rates(v) + n'= (ninf- n)/ ntau + l'= (linf- l)/ ltau + nd'= (ndinf-nd)/ndtau +} + +INITIAL { + rates(v) + n = ninf + l = linf + iassign() +} + +PROCEDURE rates(v (mV)) { + LOCAL a, b + UNITSOFF + a = exp(-0.038*(1.5+1/(1+exp(v+40)/5))*(v-11)) + b = exp(-0.038*(0.825+1/(1+exp(v+40)/5))*(v-11)) + ntau=4*b/(1+a) + if (ntau<0.1) {ntau=0.1} + ninf=1/(1+a) + + a=exp(-0.038*(1.8+1/(1+exp(v+40)/5))*(v+1)) + b=exp(-0.038*(0.7+1/(1+exp(v+40)/5))*(v+1)) + ndtau=2*b/(1+a) + if (ndtau<0.1) {ndtau=0.1} + ndinf=1/(1+a) + + a = exp(0.11*(v+56)) + ltau=0.26*(v+50) + if (ltau<2) {ltau=2} + linf=1/(1+a) + UNITSON +} 
+ diff --git a/netpyne/batchtools/examples/CA3/mod/CA1ikdr.mod b/netpyne/batchtools/examples/CA3/mod/CA1ikdr.mod new file mode 100644 index 000000000..4c5236362 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/CA1ikdr.mod @@ -0,0 +1,60 @@ +: $Id: CA1ikdr.mod,v 1.2 2010/12/01 05:10:52 samn Exp $ +TITLE IKDR CA1 + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) +} + +NEURON { + SUFFIX kdrcurrent + NONSPECIFIC_CURRENT ik + RANGE g, e, ninf, ntau +} + +PARAMETER { + celsius (degC) + g = 0.010 (mho/cm2) + e = -90 (mV) +} + +STATE { + n +} + +ASSIGNED { + v (mV) + ik (mA/cm2) + ninf + ntau (ms) +} + +PROCEDURE iassign () { ik=g*n*(v-e) } + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { + rates(v) + n'= (ninf- n)/ ntau +} + +INITIAL { + rates(v) + n = ninf + iassign() +} + +PROCEDURE rates(v (mV)) { + LOCAL a, b + UNITSOFF + a = exp(-0.11*(v-13)) + b = exp(-0.08*(v-13)) + ntau=50*b/(1+a) + if (ntau<2) {ntau=2} + ninf=1/(1+a) + UNITSON +} + diff --git a/netpyne/batchtools/examples/CA3/mod/CA1ina.mod b/netpyne/batchtools/examples/CA3/mod/CA1ina.mod new file mode 100644 index 000000000..d33ab9739 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/CA1ina.mod @@ -0,0 +1,89 @@ +: $Id: CA1ina.mod,v 1.4 2010/11/30 19:50:00 samn Exp $ +TITLE INa CA1 + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) +} + +NEURON { + SUFFIX nacurrent + NONSPECIFIC_CURRENT ina + RANGE g, e, vi, ki + RANGE minf,hinf,iinf,mtau,htau,itau : testing +} + +PARAMETER { + : v (mV) + celsius (degC) + g = 0.032 (mho/cm2) + e = 55 (mV) + vi = -60 (mV) + ki = 0.8 +} + +STATE { + m + h + I : i +} + +ASSIGNED { + i (mA/cm2) + ina (mA/cm2) + minf + mtau (ms) + hinf + htau (ms) + iinf + itau (ms) + v (mV) : testing +} + +: PROCEDURE iassign () { ina=g*m*m*m*h*i*(v-e) } +PROCEDURE iassign () { i=g*m*m*m*h*I*(v-e) ina=i} + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { + rates(v) + m' = (minf - m) / mtau + h' = (hinf - h) / htau + : i' = 
(iinf - i) / itau + I' = (iinf - I) / itau +} + +INITIAL { + rates(v) + h = hinf + m = minf + : i = iinf + I = iinf + iassign() : testing +} + + +PROCEDURE rates(v (mV)) { + LOCAL a, b + UNITSOFF + a = 0.4*(v+30)/(1-exp(-(v+30)/7.2)) + b = 0.124*(v+30)/(exp((v+30)/7.2)-1) + mtau=0.5/(a+b) + if (mtau<0.02) {mtau=0.02} + minf=a/(a+b) + a = 0.03*(v+45)/(1-exp(-(v+45)/1.5)) + b = 0.01*(v+45)/(exp((v+45)/1.5)-1) + htau=0.5/(a+b) + if (htau<0.5) {htau=0.5} + hinf=1/(1+exp((v+50)/4)) + a = exp(0.45*(v+66)) + b = exp(0.09*(v+66)) + itau=3000*b/(1+a) + if (itau<10) {itau=10} + iinf=(1+ki*exp((v-vi)/2))/(1+exp((v-vi)/2)) + UNITSON +} + diff --git a/netpyne/batchtools/examples/CA3/mod/MyExp2SynBB.mod b/netpyne/batchtools/examples/CA3/mod/MyExp2SynBB.mod new file mode 100644 index 000000000..9a68baef1 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/MyExp2SynBB.mod @@ -0,0 +1,67 @@ +: $Id: MyExp2SynBB.mod,v 1.4 2010/12/13 21:27:51 samn Exp $ +NEURON { +: THREADSAFE + POINT_PROCESS MyExp2SynBB + RANGE tau1, tau2, e, i, g, Vwt, gmax + NONSPECIFIC_CURRENT i +} + +UNITS { + (nA) = (nanoamp) + (mV) = (millivolt) + (uS) = (microsiemens) +} + +PARAMETER { + tau1=.1 (ms) <1e-9,1e9> + tau2 = 10 (ms) <1e-9,1e9> + e=0 (mV) + gmax = 1e9 (uS) + Vwt = 0 : weight for inputs coming in from vector +} + +ASSIGNED { + v (mV) + i (nA) + g (uS) + factor + etime (ms) +} + +STATE { + A (uS) + B (uS) +} + +INITIAL { + LOCAL tp + + Vwt = 0 : testing + + if (tau1/tau2 > .9999) { + tau1 = .9999*tau2 + } + A = 0 + B = 0 + tp = (tau1*tau2)/(tau2 - tau1) * log(tau2/tau1) + factor = -exp(-tp/tau1) + exp(-tp/tau2) + factor = 1/factor +} + +BREAKPOINT { + SOLVE state METHOD cnexp + g = B - A + if (g>gmax) {g=gmax}: saturation + i = g*(v - e) +} + +DERIVATIVE state { + A' = -A/tau1 + B' = -B/tau2 +} + +NET_RECEIVE(w (uS)) {LOCAL ww + ww=w + A = A + ww*factor + B = B + ww*factor +} diff --git a/netpyne/batchtools/examples/CA3/mod/MyExp2SynNMDABB.mod 
b/netpyne/batchtools/examples/CA3/mod/MyExp2SynNMDABB.mod new file mode 100644 index 000000000..01291643a --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/MyExp2SynNMDABB.mod @@ -0,0 +1,108 @@ +: $Id: MyExp2SynNMDABB.mod,v 1.4 2010/12/13 21:28:02 samn Exp $ +NEURON { +: THREADSAFE + POINT_PROCESS MyExp2SynNMDABB + RANGE tau1, tau2, e, i, iNMDA, s, sNMDA, r, tau1NMDA, tau2NMDA, Vwt, smax, sNMDAmax + NONSPECIFIC_CURRENT i, iNMDA +} + +UNITS { + (nA) = (nanoamp) + (mV) = (millivolt) + (uS) = (microsiemens) +} + +PARAMETER { + tau1 = 0.1 (ms) <1e-9,1e9> + tau2 = 10 (ms) <1e-9,1e9> + tau1NMDA = 15 (ms) + tau2NMDA = 150 (ms) + e = 0 (mV) + mg = 1 + r = 1 + smax = 1e9 (1) + sNMDAmax = 1e9 (1) + + Vwt = 0 : weight for inputs coming in from vector +} + +ASSIGNED { + v (mV) + i (nA) + iNMDA (nA) + s (1) + sNMDA (1) + mgblock (1) + factor (1) + factor2 (1) + + etime (ms) +} + +STATE { + A (1) + B (1) + A2 (1) + B2 (1) +} + +INITIAL { + + LOCAL tp + + Vwt = 0 : testing + + if (tau1/tau2 > .9999) { + tau1 = .9999*tau2 + } + A = 0 + B = 0 + tp = (tau1*tau2)/(tau2 - tau1) * log(tau2/tau1) + factor = -exp(-tp/tau1) + exp(-tp/tau2) + factor = 1/factor + + if (tau1NMDA/tau2NMDA > .9999) { + tau1NMDA = .9999*tau2NMDA + } + A2 = 0 + B2 = 0 + tp = (tau1NMDA*tau2NMDA)/(tau2NMDA - tau1NMDA) * log(tau2NMDA/tau1NMDA) + factor2 = -exp(-tp/tau1NMDA) + exp(-tp/tau2NMDA) + factor2 = 1/factor2 +} + +BREAKPOINT { + SOLVE state METHOD cnexp + : Jahr Stevens 1990 J. 
Neurosci + mgblock = 1.0 / (1.0 + 0.28 * exp(-0.062(/mV) * v) ) + s = B - A + sNMDA = B2 - A2 + if (s >smax) {s =smax }: saturation + if (sNMDA>sNMDAmax) {sNMDA=sNMDAmax}: saturation + i = s * (v - e) + iNMDA = sNMDA * (v - e) * mgblock +} + +DERIVATIVE state { + A' = -A/tau1 + B' = -B/tau2 + A2' = -A2/tau1NMDA + B2' = -B2/tau2NMDA +} + +NET_RECEIVE(w (uS)) {LOCAL ww + ww=w + :printf("NMDA Spike: %g\n", t) + if(r>=0){ : if r>=0, g = AMPA + NMDA*r + A = A + factor *ww + B = B + factor *ww + A2 = A2 + factor2*ww*r + B2 = B2 + factor2*ww*r + }else{ + if(r>-1000){ : if r>-1, g = NMDA*r + A2 = A2 - factor2*ww*r + B2 = B2 - factor2*ww*r + } + : if r<0 and r<>-1, g = 0 + } +} diff --git a/netpyne/batchtools/examples/CA3/mod/aux_fun.inc b/netpyne/batchtools/examples/CA3/mod/aux_fun.inc new file mode 100644 index 000000000..ccb579afb --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/aux_fun.inc @@ -0,0 +1,43 @@ +: $Id: aux_fun.inc,v 1.1 2009/11/04 01:24:52 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + + + +:------------------------------------------------------------------- +FUNCTION fun1(v(mV),V0(mV),A(/ms),B(mV))(/ms) { + + fun1 = A*exp((v-V0)/B) +} + +FUNCTION fun2(v(mV),V0(mV),A(/ms),B(mV))(/ms) { + + fun2 = A/(exp((v-V0)/B)+1) +} + +FUNCTION fun3(v(mV),V0(mV),A(/ms),B(mV))(/ms) { + + if(fabs((v-V0)/B)<1e-6) { + :if(v==V0) { + fun3 = A*B/1(mV) * (1- 0.5 * (v-V0)/B) + } else { + fun3 = A/1(mV)*(v-V0)/(exp((v-V0)/B)-1) + } +} + +FUNCTION min(x,y) { if (x<=y){ min = x }else{ min = y } } +FUNCTION max(x,y) { if (x>=y){ max = x }else{ max = y } } diff --git a/netpyne/batchtools/examples/CA3/mod/caolmw.mod b/netpyne/batchtools/examples/CA3/mod/caolmw.mod new file mode 100644 index 000000000..3ea21a7ef --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/caolmw.mod @@ -0,0 +1,47 @@ +: $Id: caolmw.mod,v 1.2 2010/11/30 16:40:09 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + +UNITS { + (mollar) = (1/liter) + (M) = (mollar) + (mM) = (millimollar) + (mA) = (milliamp) + (mV) = (millivolt) + (mS) = (millisiemens) +} + +NEURON { + SUFFIX Caolmw + USEION ca READ ica, cai WRITE cai + RANGE alpha, tau +} + +PARAMETER { + alpha = 0.002 (cm2-M/mA-ms) + tau = 80 (ms) +} + +ASSIGNED { ica (mA/cm2) } + +INITIAL { cai = 0 } + +STATE { cai (mM) } + +BREAKPOINT { SOLVE states METHOD cnexp } + +DERIVATIVE states { cai' = -(1000) * alpha * ica - cai/tau } diff --git a/netpyne/batchtools/examples/CA3/mod/icaolmw.mod b/netpyne/batchtools/examples/CA3/mod/icaolmw.mod new file mode 100644 index 000000000..51112d099 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/icaolmw.mod @@ -0,0 +1,51 @@ +: $Id: icaolmw.mod,v 1.2 2010/11/30 16:44:13 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) + (mS) = (millisiemens) +} + +NEURON { + SUFFIX ICaolmw + USEION ca WRITE ica + RANGE gca,eca +} + +PARAMETER { + gca = 1 (mS/cm2) + eca = 120 (mV) +} + +ASSIGNED { + ica (mA/cm2) + v (mV) +} + +PROCEDURE iassign () { ica = (1e-3) * gca * mcainf(v)^2 * (v-eca) } + +INITIAL { + iassign() +} + +BREAKPOINT { iassign() } + +FUNCTION mcainf(v(mV)) { mcainf = fun2(v, -20, 1, -9)*1(ms) } + +INCLUDE "aux_fun.inc" diff --git a/netpyne/batchtools/examples/CA3/mod/iholmw.mod b/netpyne/batchtools/examples/CA3/mod/iholmw.mod new file mode 100644 index 000000000..ccd919202 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/iholmw.mod @@ -0,0 +1,60 @@ +: $Id: iholmw.mod,v 1.2 2010/11/30 16:34:22 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) + (mS) = (millisiemens) +} + +NEURON { + SUFFIX Iholmw + NONSPECIFIC_CURRENT i + RANGE gh,eh +} + +PARAMETER { + gh = 0.15 (mS/cm2) + eh = -40 (mV) +} + +ASSIGNED { + v (mV) + i (mA/cm2) +} + +STATE { q } + +PROCEDURE iassign () { i = (1e-3) * gh * q * (v-eh) } + +INITIAL { + q = qinf(v) + iassign() +} + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { q' = (qinf(v)-q)/qtau(v) } + +FUNCTION qinf(v(mV)) { qinf = fun2(v, -80, 1, 10)*1(ms) } +FUNCTION qtau(v(mV))(ms) { qtau = 200(ms)/(exp((v+70(mV))/20(mV))+exp(-(v+70(mV))/20(mV))) + 5(ms) } + +INCLUDE "aux_fun.inc" diff --git a/netpyne/batchtools/examples/CA3/mod/kcaolmw.mod b/netpyne/batchtools/examples/CA3/mod/kcaolmw.mod new file mode 100644 index 000000000..b2368787e --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/kcaolmw.mod @@ -0,0 +1,52 @@ +: $Id: kcaolmw.mod,v 1.2 2010/11/30 16:47:18 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) + (mS) = (millisiemens) + (mollar) = (1/liter) + (mM) = (millimollar) +} + +NEURON { + SUFFIX KCaolmw + USEION k WRITE ik + USEION ca READ cai + RANGE gkca,ek,kd +} + +PARAMETER { + gkca = 10 (mS/cm2) + ek = -90 (mV) + kd = 30 (mM) +} + +ASSIGNED { + cai (mM) + v (mV) + ik (mA/cm2) +} + +PROCEDURE iassign () { ik = (1e-3) * gkca * cai/(cai+kd) * (v-ek) } + +INITIAL { + iassign() +} + +BREAKPOINT { iassign() } diff --git a/netpyne/batchtools/examples/CA3/mod/kdrbwb.mod b/netpyne/batchtools/examples/CA3/mod/kdrbwb.mod new file mode 100644 index 000000000..fc52ae534 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/kdrbwb.mod @@ -0,0 +1,76 @@ +: $Id: kdrbwb.mod,v 1.4 2010/12/13 21:35:26 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) + (mS) = (millisiemens) +} + +NEURON { + SUFFIX Kdrbwb + USEION k WRITE ik + RANGE phin,gkdr,ek + RANGE taon,ninf +} + +PARAMETER { + gkdr = 9 (mS/cm2) + ek = -90 (mV) + phin = 5 +} + +ASSIGNED { + v (mV) + ik (mA/cm2) + celsius (degC) + ninf (1) + taon (ms) +} + +STATE { n } + +PROCEDURE iassign () { ik = (1e-3) * gkdr * n^4 * (v-ek) } + +INITIAL { + rates(v) + n = ninf + iassign() +} + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { + rates(v) + n' = (ninf-n)/taon +} + +PROCEDURE rates(v(mV)) { LOCAL an, bn, q10 + q10 = phin:^((celsius-27.0(degC))/10.0(degC)) + + an = fun3(v, -34, -0.01, -10) + bn = fun1(v, -44, 0.125, -80) + + ninf = an/(an+bn) + taon = 1./((an+bn)*q10) +} + +INCLUDE "aux_fun.inc" diff --git a/netpyne/batchtools/examples/CA3/mod/nafbwb.mod b/netpyne/batchtools/examples/CA3/mod/nafbwb.mod new file mode 100644 index 000000000..37281dc94 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/mod/nafbwb.mod @@ -0,0 +1,81 @@ +: $Id: nafbwb.mod,v 1.4 2010/12/13 21:35:08 samn Exp $ +COMMENT + +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +// +// NOTICE OF COPYRIGHT AND OWNERSHIP OF SOFTWARE +// +// Copyright 2007, The University Of Pennsylvania +// School of Engineering & Applied Science. +// All rights reserved. +// For research use only; commercial use prohibited. +// Distribution without permission of Maciej T. Lazarewicz not permitted. 
+// mlazarew@seas.upenn.edu +// +//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ENDCOMMENT + +UNITS { + (mA) = (milliamp) + (mV) = (millivolt) + (mS) = (millisiemens) +} + +NEURON { + SUFFIX Nafbwb + USEION na WRITE ina + RANGE phih + RANGE gna, ena, taoh : testing +} + +PARAMETER { + gna = 35 (mS/cm2) + ena = 55 (mV) + phih = 5 +} + +ASSIGNED { + v (mV) + ina (mA/cm2) + minf (1) + hinf (1) + taoh (ms) + celsius (degC) +} + +STATE { h } + +PROCEDURE iassign () { ina = (1e-3) * gna * minf^3 * h * (v-ena) } + +INITIAL { + rates(v) + h = hinf + iassign() +} + +BREAKPOINT { + SOLVE states METHOD cnexp + iassign() +} + +DERIVATIVE states { + rates(v) + h' = (hinf-h)/taoh +} + +PROCEDURE rates(v(mV)) { LOCAL am, bm, ah, bh, q10 + + q10 = phih:^((celsius-27.0(degC))/10.0(degC)) + + am = fun3(v, -35, -0.1, -10) + bm = fun1(v, -60, 4, -18) + minf = am/(am+bm) + + ah = fun1(v, -58, 0.07, -20) + bh = fun2(v, -28, 1, -10) + hinf = ah/(ah+bh) + taoh = 1./((ah+bh)*q10) +} + +INCLUDE "aux_fun.inc" diff --git a/netpyne/batchtools/examples/CA3/netParams.py b/netpyne/batchtools/examples/CA3/netParams.py new file mode 100644 index 000000000..156e9ff20 --- /dev/null +++ b/netpyne/batchtools/examples/CA3/netParams.py @@ -0,0 +1,321 @@ +from netpyne.batchtools import specs +from cfg import cfg + +cfg.update_cfg() +### params ### +# Network parameters +netParams = specs.NetParams() # object of class NetParams to store the network parameters +netParams.defaultThreshold = 0.0 +netParams.defineCellShapes = True # sets 3d geometry aligned along the y-axis + + +############################################################################### +## Cell types +############################################################################### +# Basket cell +BasketCell = {'secs':{}} +BasketCell['secs']['soma'] = {'geom': {}, 'mechs': {}} +BasketCell['secs']['soma']['geom'] = {'diam': 100, 'L': 31.831, 'nseg': 1, 'cm': 1} 
+BasketCell['secs']['soma']['mechs'] = {'pas': {'g': 0.1e-3, 'e': -65}, 'Nafbwb': {}, 'Kdrbwb': {}} +netParams.cellParams['BasketCell'] = BasketCell + + +# OLM cell +OlmCell = {'secs':{}} +OlmCell['secs']['soma'] = {'geom': {}, 'mechs': {}} +OlmCell['secs']['soma']['geom'] = {'diam': 100, 'L': 31.831, 'nseg': 1, 'cm': 1} +OlmCell['secs']['soma']['mechs'] = { + 'pas': {'g': 0.1e-3, 'e': -65}, + 'Nafbwb': {}, + 'Kdrbwb': {}, + 'Iholmw': {}, + 'Caolmw': {}, + 'ICaolmw': {}, + 'KCaolmw': {}} +netParams.cellParams['OlmCell'] = OlmCell + + +# Pyramidal cell +PyrCell = {'secs':{}} +PyrCell['secs']['soma'] = {'geom': {}, 'mechs': {}} +PyrCell['secs']['soma']['geom'] = {'diam': 20, 'L': 20, 'cm': 1, 'Ra': 150} +PyrCell['secs']['soma']['mechs'] = { + 'pas': {'g': 0.0000357, 'e': -70}, + 'nacurrent': {}, + 'kacurrent': {}, + 'kdrcurrent': {}, + 'hcurrent': {}} +PyrCell['secs']['Bdend'] = {'geom': {}, 'mechs': {}} +PyrCell['secs']['Bdend']['geom'] = {'diam': 2, 'L': 200, 'cm': 1, 'Ra': 150} +PyrCell['secs']['Bdend']['topol'] = {'parentSec': 'soma', 'parentX': 0, 'childX': 0} +PyrCell['secs']['Bdend']['mechs'] = { + 'pas': {'g': 0.0000357, 'e': -70}, + 'nacurrent': {'ki': 1}, + 'kacurrent': {}, + 'kdrcurrent': {}, + 'hcurrent': {}} +PyrCell['secs']['Adend1'] = {'geom': {}, 'mechs': {}} +PyrCell['secs']['Adend1']['geom'] = {'diam': 2, 'L': 150, 'cm': 1, 'Ra': 150} +PyrCell['secs']['Adend1']['topol'] = {'parentSec': 'soma', 'parentX': 1.0, 'childX': 0} # here there is a change: connected to end soma(1) instead of soma(0.5) +PyrCell['secs']['Adend1']['mechs'] = { + 'pas': {'g': 0.0000357, 'e': -70}, + 'nacurrent': {'ki': 0.5}, + 'kacurrent': {'g': 0.072}, + 'kdrcurrent': {}, + 'hcurrent': {'v50': -82, 'g': 0.0002}} +PyrCell['secs']['Adend2'] = {'geom': {}, 'mechs': {}} +PyrCell['secs']['Adend2']['geom'] = {'diam': 2, 'L': 150, 'cm': 1, 'Ra': 150} +PyrCell['secs']['Adend2']['topol'] = {'parentSec': 'Adend1', 'parentX': 1, 'childX': 0} +PyrCell['secs']['Adend2']['mechs'] = { + 
'pas': {'g': 0.0000357, 'e': -70}, + 'nacurrent': {'ki': 0.5}, + 'kacurrent': {'g': 0, 'gd': 0.120}, + 'kdrcurrent': {}, + 'hcurrent': {'v50': -90, 'g': 0.0004}} +PyrCell['secs']['Adend3'] = {'geom': {}, 'mechs': {}} +PyrCell['secs']['Adend3']['geom'] = {'diam': 2, 'L': 150, 'cm': 2, 'Ra': 150} +PyrCell['secs']['Adend3']['topol'] = {'parentSec': 'Adend2', 'parentX': 1, 'childX': 0} +PyrCell['secs']['Adend3']['mechs'] = { + 'pas': {'g': 0.0000714, 'e': -70}, + 'nacurrent': {'ki': 0.5}, + 'kacurrent': {'g': 0, 'gd': 0.200}, + 'kdrcurrent': {}, + 'hcurrent': {'v50': -90, 'g': 0.0007}} +netParams.cellParams['PyrCell'] = PyrCell + + +############################################################################### +## Synaptic mechs +############################################################################### + +netParams.synMechParams['AMPAf'] = {'mod': 'MyExp2SynBB', 'tau1': 0.05, 'tau2': 5.3, 'e': 0} +netParams.synMechParams['NMDA'] = {'mod': 'MyExp2SynNMDABB', 'tau1': 0.05, 'tau2': 5.3, 'tau1NMDA': 15, 'tau2NMDA': 150, 'r': 1, 'e': 0} +netParams.synMechParams['GABAf'] = {'mod': 'MyExp2SynBB', 'tau1': 0.07, 'tau2': 9.1, 'e': -80} +netParams.synMechParams['GABAs'] = {'mod': 'MyExp2SynBB', 'tau1': 0.2, 'tau2': 20, 'e': -80} +netParams.synMechParams['GABAss'] = {'mod': 'MyExp2SynBB', 'tau1': 20, 'tau2': 40, 'e': -80} + + +############################################################################### +## Populations +############################################################################### +netParams.popParams['PYR'] = {'cellType': 'PyrCell', 'numCells': 800} +netParams.popParams['BC'] = {'cellType': 'BasketCell', 'numCells': 200} +netParams.popParams['OLM'] = {'cellType': 'OlmCell', 'numCells': 200} + + +############################################################################### +# Current-clamp to cells +############################################################################### +netParams.stimSourceParams['IClamp_PYR'] = {'type': 'IClamp', 'del': 
2*0.1, 'dur': 1e9, 'amp': 50e-3} +netParams.stimSourceParams['IClamp_OLM'] = {'type': 'IClamp', 'del': 2*0.1, 'dur': 1e9, 'amp': -25e-3} + +netParams.stimTargetParams['IClamp_PYR->PYR'] = { + 'source': 'IClamp_PYR', + 'sec': 'soma', + 'loc': 0.5, + 'conds': {'pop': 'PYR'}} + +netParams.stimTargetParams['IClamp_OLM->OLM'] = { + 'source': 'IClamp_OLM', + 'sec': 'soma', + 'loc': 0.5, + 'conds': {'pop': 'OLM'}} + + +############################################################################### +# Setting connections +############################################################################### + +# PYR -> X, NMDA +netParams.connParams['PYR->BC_NMDA'] = {'preConds': {'pop': 'PYR'}, 'postConds': {'pop': 'BC'}, + 'convergence': 100, + 'weight': cfg.nmda['PYR->BC'], + 'delay': 2, + 'sec': 'soma', + 'loc': 0.5, + 'synMech': 'NMDA'} + +netParams.connParams['PYR->OLM_NMDA'] = {'preConds': {'pop': 'PYR'}, 'postConds': {'pop': 'OLM'}, + 'convergence': 10, + 'weight': cfg.nmda['PYR->OLM'], + 'delay': 2, + 'sec': 'soma', + 'loc': 0.5, + 'synMech': 'NMDA'} + +netParams.connParams['PYR->PYR_NMDA'] = {'preConds': {'pop': 'PYR'}, 'postConds': {'pop': 'PYR'}, + 'convergence': 25, + 'weight': cfg.nmda['PYR->PYR'], + 'delay': 2, + 'sec': 'Bdend', + 'loc': 1.0, + 'synMech': 'NMDA'} + +# PYR -> X, AMPA +netParams.connParams['PYR->BC_AMPA'] = {'preConds': {'pop': 'PYR'}, 'postConds': {'pop': 'BC'}, + 'convergence': 100, + 'weight': cfg.ampa['PYR->BC'], + 'delay': 2, + 'sec': 'soma', + 'loc': 0.5, + 'synMech': 'AMPAf'} + +netParams.connParams['PYR->OLM_AMPA'] = {'preConds': {'pop': 'PYR'}, 'postConds': {'pop': 'OLM'}, + 'convergence': 10, + 'weight': cfg.ampa['PYR->OLM'], + 'delay': 2, + 'sec': 'soma', + 'loc': 0.5, + 'synMech': 'AMPAf'} + +netParams.connParams['PYR->PYR_AMPA'] = {'preConds': {'pop': 'PYR'}, 'postConds': {'pop': 'PYR'}, + 'convergence': 25, + 'weight': cfg.ampa['PYR->PYR'], + 'delay': 2, + 'sec': 'Bdend', + 'loc': 1.0, + 'synMech': 'AMPAf'} + +# BC -> X, GABA 
+netParams.connParams['BC->BC_GABA'] = {'preConds': {'pop': 'BC'}, 'postConds': {'pop': 'BC'}, + 'convergence': 60, + 'weight': cfg.gaba['BC->BC'], + 'delay': 2, + 'sec': 'soma', + 'loc': 0.5, + 'synMech': 'GABAf'} + +netParams.connParams['BC->PYR_GABA'] = {'preConds': {'pop': 'BC'}, 'postConds': {'pop': 'PYR'}, + 'convergence': 50, + 'weight': cfg.gaba['BC->PYR'], + 'delay': 2, + 'sec': 'soma', + 'loc': 0.5, + 'synMech': 'GABAf'} + + +# OLM -> PYR, GABA +netParams.connParams['OLM->PYR_GABA'] = {'preConds': {'pop': 'OLM'}, 'postConds': {'pop': 'PYR'}, + 'convergence': 20, + 'weight': cfg.gaba['OLM->PYR'], + 'delay': 2, + 'sec': 'Adend2', + 'loc': 0.5, + 'synMech': 'GABAs'} + + +############################################################################### +# Setting NetStims +############################################################################### +# to PYR +netParams.stimSourceParams['NetStim_PYR_SOMA_AMPA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_PYR_SOMA_AMPA->PYR'] = { + 'source': 'NetStim_PYR_SOMA_AMPA', + 'conds': {'pop': 'PYR'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 4*0.05e-3, # different from published value + 'delay': 2*0.1, + 'synMech': 'AMPAf'} + +netParams.stimSourceParams['NetStim_PYR_ADEND3_AMPA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_PYR_ADEND3_AMPA->PYR'] = { + 'source': 'NetStim_PYR_ADEND3_AMPA', + 'conds': {'pop': 'PYR'}, + 'sec': 'Adend3', + 'loc': 0.5, + 'weight': 4*0.05e-3, # different from published value + 'delay': 2*0.1, + 'synMech': 'AMPAf'} + +netParams.stimSourceParams['NetStim_PYR_SOMA_GABA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_PYR_SOMA_GABA->PYR'] = { + 'source': 'NetStim_PYR_SOMA_GABA', + 'conds': {'pop': 'PYR'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 
0.012e-3, + 'delay': 2*0.1, + 'synMech': 'GABAf'} + +netParams.stimSourceParams['NetStim_PYR_ADEND3_GABA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_PYR_ADEND3_GABA->PYR'] = { + 'source': 'NetStim_PYR_ADEND3_GABA', + 'conds': {'pop': 'PYR'}, + 'sec': 'Adend3', + 'loc': 0.5, + 'weight': 0.012e-3, + 'delay': 2*0.1, + 'synMech': 'GABAf'} + +netParams.stimSourceParams['NetStim_PYR_ADEND3_NMDA'] = {'type': 'NetStim', 'interval': 100, 'number': int((1000/100.0)*cfg.duration), 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_PYR_ADEND3_NMDA->PYR'] = { + 'source': 'NetStim_PYR_ADEND3_NMDA', + 'conds': {'pop': 'PYR'}, + 'sec': 'Adend3', + 'loc': 0.5, + 'weight': 6.5e-3, + 'delay': 2*0.1, + 'synMech': 'NMDA'} + +# to BC +netParams.stimSourceParams['NetStim_BC_SOMA_AMPA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_BC_SOMA_AMPA->BC'] = { + 'source': 'NetStim_BC_SOMA_AMPA', + 'conds': {'pop': 'BC'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 0.02e-3, + 'delay': 2*0.1, + 'synMech': 'AMPAf'} + +netParams.stimSourceParams['NetStim_BC_SOMA_GABA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_BC_SOMA_GABA->BC'] = { + 'source': 'NetStim_BC_SOMA_GABA', + 'conds': {'pop': 'BC'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 0.2e-3, + 'delay': 2*0.1, + 'synMech': 'GABAf'} + +# to OLM +netParams.stimSourceParams['NetStim_OLM_SOMA_AMPA'] = {'type': 'NetStim', 'interval': 1, 'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_OLM_SOMA_AMPA->OLM'] = { + 'source': 'NetStim_OLM_SOMA_AMPA', + 'conds': {'pop': 'OLM'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 0.0625e-3, + 'delay': 2*0.1, + 'synMech': 'AMPAf'} + +netParams.stimSourceParams['NetStim_OLM_SOMA_GABA'] = {'type': 'NetStim', 'interval': 1, 
'number': 1000*cfg.duration, 'start': 0, 'noise': 1} +netParams.stimTargetParams['NetStim_OLM_SOMA_GABA->OLM'] = { + 'source': 'NetStim_OLM_SOMA_GABA', + 'conds': {'pop': 'OLM'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 0.2e-3, + 'delay': 2*0.1, + 'synMech': 'GABAf'} + +# Medial Septal inputs to BC and OLM cells +netParams.stimSourceParams['Septal'] = {'type': 'NetStim', 'interval': 150, 'number': int((1000/150)*cfg.duration), 'start': 0, 'noise': 0} +netParams.stimTargetParams['Septal->BC'] = { + 'source': 'Septal', + 'conds': {'pop': 'BC'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 1.6e-3, + 'delay': 2*0.1, + 'synMech': 'GABAss'} + +netParams.stimTargetParams['Septal->OLM'] = { + 'source': 'Septal', + 'conds': {'pop': 'OLM'}, + 'sec': 'soma', + 'loc': 0.5, + 'weight': 1.6e-3, + 'delay': 2*0.1, + 'synMech': 'GABAss'} diff --git a/netpyne/batchtools/examples/CA3/optuna_search.py b/netpyne/batchtools/examples/CA3/optuna_search.py new file mode 100644 index 000000000..3ee09881d --- /dev/null +++ b/netpyne/batchtools/examples/CA3/optuna_search.py @@ -0,0 +1,39 @@ +from netpyne.batchtools.search import search + +params = {'nmda.PYR->BC' : [1e-3, 1.8e-3], + #'nmda.PYR->OLM': [0.4e-3, 1.0e-3], + #'nmda.PYR->PYR': [0.001e-3, 0.007e-3], + 'ampa.PYR->BC' : [0.2e-3, 0.5e-3], + #'ampa.PYR->OLM': [0.2e-3, 0.5e-3], + #'ampa.PYR->PYR': [0.01e-3, 0.03e-3], + #'gaba.BC->BC' : [1e-3, 7e-3], + 'gaba.BC->PYR' : [0.4e-3, 1.0e-3], + #'gaba.OLM->PYR': [40e-3, 100e-3], + } + +# use batch_shell_config if running directly on the machine +shell_config = {'command': 'mpiexec -np 4 nrniv -python -mpi init.py',} + +# use batch_sge_config if running on a +sge_config = { + 'queue': 'cpu.q', + 'cores': 5, + 'vmem': '4G', + 'realtime': '00:30:00', + 'command': 'mpiexec -n $NSLOTS -hosts $(hostname) nrniv -python -mpi init.py'} + + +run_config = sge_config + +search(job_type = 'sge', # or sh + comm_type = 'socket', + label = 'optuna', + params = params, + output_path = '../optuna_batch', + 
checkpoint_path = '../ray', + run_config = run_config, + num_samples = 27, + metric = 'loss', + mode = 'min', + algorithm = 'optuna', + max_concurrent = 3) diff --git a/netpyne/batchtools/examples/categorical_strings/batch.py b/netpyne/batchtools/examples/categorical_strings/batch.py new file mode 100644 index 000000000..d8cf9ef17 --- /dev/null +++ b/netpyne/batchtools/examples/categorical_strings/batch.py @@ -0,0 +1,18 @@ +from netpyne.batchtools.search import search + +params = { + 'param_str': [ 'string0', 'string1', 'string2' ] + } + +search(job_type = 'sh', + comm_type = 'socket', + label = 'categorical', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python categorical.py'}, + num_samples = 1, + metric = 'return', + mode = 'min', + algorithm = 'variant_generator', + max_concurrent = 3) diff --git a/netpyne/batchtools/examples/categorical_strings/categorical.py b/netpyne/batchtools/examples/categorical_strings/categorical.py new file mode 100644 index 000000000..622e90ca1 --- /dev/null +++ b/netpyne/batchtools/examples/categorical_strings/categorical.py @@ -0,0 +1,26 @@ +from netpyne.batchtools import specs, comm +import json + + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'categorical' +cfg.saveFolder = '.' 
+ +cfg.param_str = ['default'] + +cfg.update_cfg() + +# --------------------------------------------------- # + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'return': 0, 'param_str': str(cfg.param_str), 'type': str(type(cfg.param_str))}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/cfg_loading/batch.py b/netpyne/batchtools/examples/cfg_loading/batch.py new file mode 100644 index 000000000..d8cf9ef17 --- /dev/null +++ b/netpyne/batchtools/examples/cfg_loading/batch.py @@ -0,0 +1,18 @@ +from netpyne.batchtools.search import search + +params = { + 'param_str': [ 'string0', 'string1', 'string2' ] + } + +search(job_type = 'sh', + comm_type = 'socket', + label = 'categorical', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python categorical.py'}, + num_samples = 1, + metric = 'return', + mode = 'min', + algorithm = 'variant_generator', + max_concurrent = 3) diff --git a/netpyne/batchtools/examples/cfg_loading/categorical.csv b/netpyne/batchtools/examples/cfg_loading/categorical.csv new file mode 100644 index 000000000..e2ad391a6 --- /dev/null +++ b/netpyne/batchtools/examples/cfg_loading/categorical.csv @@ -0,0 +1,13 @@ +,data,return,timestamp,checkpoint_dir_name,done,training_iteration,trial_id,date,time_this_iter_s,time_total_s,pid,hostname,node_ip,time_since_restore,iterations_since_restore,config/param_str,config/saveFolder,config/simLabel,logdir +0,"return 0 +param_str string0 +type +dtype: object",0,1724028563,,False,1,de3ff_00000,2024-08-18_19-49-23,1.7559211254119873,1.7559211254119873,87347,Jamess-MacBook-Pro.local,127.0.0.1,1.7559211254119873,1,string0,../grid_batch,categorical_00000,de3ff_00000 +1,"return 0 +param_str string1 +type +dtype: 
object",0,1724028563,,False,1,de3ff_00001,2024-08-18_19-49-23,1.754591941833496,1.754591941833496,87348,Jamess-MacBook-Pro.local,127.0.0.1,1.754591941833496,1,string1,../grid_batch,categorical_00001,de3ff_00001
+2,"return 0
+param_str string2
+type
+dtype: object",0,1724028563,,False,1,de3ff_00002,2024-08-18_19-49-23,1.7577638626098633,1.7577638626098633,87349,Jamess-MacBook-Pro.local,127.0.0.1,1.7577638626098633,1,string2,../grid_batch,categorical_00002,de3ff_00002
diff --git a/netpyne/batchtools/examples/cfg_loading/categorical.py b/netpyne/batchtools/examples/cfg_loading/categorical.py
new file mode 100644
index 000000000..3a8005872
--- /dev/null
+++ b/netpyne/batchtools/examples/cfg_loading/categorical.py
@@ -0,0 +1,13 @@
+from netpyne.batchtools import specs # import the custom batch specs
+cfg = specs.SimConfig({'type': 0}) # create a SimConfig object, initializes it with a dictionary {'type': 0} such that
+print("cfg.type={}".format(cfg.type)) # cfg.type == 0
+try:
+ cfg.update({'typo': 1}, force_match=True) # cfg.typo is not defined, so this line will raise an AttributeError
+except AttributeError as e:
+ print(e)
+cfg.update({'typo': 1}) # without force_match, the typo attribute cfg.typo is created and set to 1
+print("cfg.type={}".format(cfg.type)) # cfg.type remains unchanged due to a typo in the attribute name 'type' -> 'typo'
+print("cfg.typo={}".format(cfg.typo)) # instead, cfg.typo is created and set to the value 1
+
+cfg.test_mappings({'type': 0}) # this will return True, as the mappings are valid
+cfg.test_mappings({'missing': 1}) # this will raise an AttributeError, as the 'missing' attribute is not defined
\ No newline at end of file
diff --git a/netpyne/batchtools/examples/jupyter/batchtools.ipynb b/netpyne/batchtools/examples/jupyter/batchtools.ipynb
new file mode 100644
index 000000000..22fe3489f
--- /dev/null
+++ b/netpyne/batchtools/examples/jupyter/batchtools.ipynb
@@ -0,0 +1,314 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "source": 
[ + "Jupyter Tutorial: The NetPyNE batchtools subpackage\n", + "How to use the `specs` and `comm` to communicate with the `batchtools` `dispatcher`\n" + ], + "metadata": { + "collapsed": false + }, + "id": "89ec6ca2392a9a0d" + }, + { + "cell_type": "markdown", + "source": [ + "For each individual `sim`, communication with the `batchtools` `dispatcher` occurs through the `specs` and `comm` objects" + ], + "metadata": { + "collapsed": false + }, + "id": "be50f40d8e61a944" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from netpyne.batchtools import specs, comm" + ], + "metadata": { + "collapsed": false + }, + "id": "6f321aedb7faf945", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "the `specs` object is an instantiation of a custom class extending the `batchtk` `Runner` ..." + ], + "metadata": { + "collapsed": false + }, + "id": "5f2f08f0b5e582c3" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "help(type(specs))" + ], + "metadata": { + "collapsed": false + }, + "id": "29fa261236494bc3", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "From this `specs` object, we can similarly call `specs.NetParams` and `specs.SimConfig` to create the NetPyNE objects..." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "64ead24451bbad4a" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "help(specs.NetParams)\n", + "help(specs.SimConfig)" + ], + "metadata": { + "collapsed": false + }, + "id": "43d263d080800019", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "The `batchtools` job submission tool uses `environmental variables` to pass values to our `config` object created by `specs.SimConfig`, these `environmental variables` are captured during the `specs` `object creation` which occurs during the batchtools `import` (from the `batchtools` `__init__.py`:\n", + "```\n", + "from netpyne.batchtools.runners import NetpyneRunner\n", + "specs = NetpyneRunner()\n", + "```" + ], + "metadata": { + "collapsed": false + }, + "id": "710cc6084bd7af02" + }, + { + "cell_type": "markdown", + "source": [ + "Let's `export` some `environmental variables` to pass values to our `config` object. When this is handled by the `batchtools` `subpackage`, this occurs automatically..." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "52704684f5e80f3c" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "%env STRRUNTK0 =foo.bar=baz\n", + "%env FLOATRUNTK1 =float_val=7.7\n", + "from netpyne.batchtools import NetpyneRunner\n", + "specs = NetpyneRunner()" + ], + "metadata": { + "collapsed": false + }, + "id": "50de117ff7f43aa6", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "One way of retrieving these values is by calling `specs.get_mappings()`" + ], + "metadata": { + "collapsed": false + }, + "id": "fac14e517044b980" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "print(specs.get_mappings())" + ], + "metadata": { + "collapsed": false + }, + "id": "257fad390f4abce", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Now, let's create our `config` object using the `specs.SimConfig()` constructor\n", + "This `config` object will hold a `dictionary` such that the initial values `foo['bar']` = `not_baz` and a `float_val` = `3.3`" + ], + "metadata": { + "collapsed": false + }, + "id": "92d41061bb828744" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "cfg = specs.SimConfig()\n", + "cfg.foo = {'bar': 'not_baz', 'qux': 'quux'}\n", + "cfg.float_val = 3.3\n", + "print(\"cfg.foo['bar'] = {}\".format(cfg.foo['bar']))\n", + "print(\"cfg.float_val = {}\".format(cfg.float_val))" + ], + "metadata": { + "collapsed": false + }, + "id": "ca121d6ab30c3e7b", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Finally, calling the `cfg.update_cfg()` method will overwrite the original values with our environment values, (`baz` and `7.7`)...\n", + "\n", + "in NetPyNE, this was originally handled with the:\n", + "```\n", + "try:\n", + " from __main__ import cfg\n", + "except:\n", + " from cfg import cfg\n", + "```\n", + "API idiom in the `netParams.py` file...\n", + " \n", + "as well as the \n", + "```\n", + "cfg, netParams = 
sim.readCmdLineArgs(simConfigDefault='src/cfg.py', netParamsDefault='src/netParams.py')\n", + "```\n", + "API idiom in the `init.py` file...\n", + "\n", + "using the `batchtools` subpackage, we can treat the `cfg` as an object and pass it between scripts via `import` statements...\n", + "in `netParams.py`...\n", + "```\n", + "from cfg import cfg\n", + "cfg.update()\n", + "```\n", + "in `init.py`...\n", + "```\n", + "from netParams import cfg, netParams\n", + "sim.createSimulateAnalyze(simConfig=cfg, netParams=netParams)\n", + "```" + ], + "metadata": { + "collapsed": false + }, + "id": "6ea43f729d0685d4" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "print(\"prior to cfg.update()\")\n", + "print(\"cfg.foo['bar'] = {}\".format(cfg.foo['bar']))\n", + "print(\"cfg.float_val = {}\".format(cfg.float_val))\n", + "print()\n", + "cfg.update() # call update_cfg to update values in the cfg object with values assigned by batch\n", + "print(\"after the cfg.update()\")\n", + "print(\"cfg.foo['bar'] = {}\".format(cfg.foo['bar']))\n", + "print(\"cfg.float_val = {}\".format(cfg.float_val))" + ], + "metadata": { + "collapsed": false + }, + "id": "a9426b6e6594961", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Finally, the `comm object` is used to report to the monitoring `dispatcher object`\n", + "the means of communication is dependent on which `dispatcher object` is instantiated, and communicated through environmental variables\n", + "in this case, since there is no `dispatcher object` the `comm` methods will simply perform `pass operations`" + ], + "metadata": { + "collapsed": false + }, + "id": "65bbb0ef2c76295a" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "comm.initialize() # initializes comm object, establishing channel to communicate with the host dispatcher object" + ], + "metadata": { + "collapsed": false + }, + "id": "e9141d91d6e02aa3", + "execution_count": null + }, + { + "cell_type": "code", + 
"outputs": [], + "source": [ + "print(comm.is_host()) # returns a boolean IF the calling process is the 0th ranked parallelcontext, similar to sim.pc.rank == 0" + ], + "metadata": { + "collapsed": false + }, + "id": "5ed6a524bd8a3e0b", + "execution_count": null + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "comm.send('message') # sends 'message' to the `dispatcher object`" + ], + "metadata": { + "collapsed": false + }, + "id": "1966edbf32649352", + "execution_count": null + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "comm.close() #finalizes communication, closes any resources used to communicate with the `dispatcher object`" + ], + "metadata": { + "collapsed": false + }, + "id": "34f021af4127363c" + }, + { + "cell_type": "markdown", + "source": [], + "metadata": { + "collapsed": false + }, + "id": "648746fff96b8a72" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py new file mode 100644 index 000000000..ed6a05474 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py @@ -0,0 +1,21 @@ +from netpyne.batchtools.search import search + +params = {'x0': [0, 3], + 'x1': [0, 3] + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'optuna', + params = params, + output_path = '../optuna_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python 
rosenbrock.py'}, + num_samples = 9, + metric = 'fx', + mode = 'min', + algorithm = 'optuna', + max_concurrent = 3) diff --git a/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/cma_batch.py b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/cma_batch.py new file mode 100644 index 000000000..9bde349c6 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/cma_batch.py @@ -0,0 +1,31 @@ +from batchtk.runtk.trial import trial, LABEL_POINTER, PATH_POINTER +from netpyne.batchtools import dispatchers, submits +from cmaes import CMA +import numpy +import os +Dispatcher = dispatchers.INETDispatcher +Submit = submits.SHSubmitSOCK + +cwd = os.getcwd() + +def eval_rosenbrock(x0, x1, tid): + cfg = { + 'x0': x0, + 'x1': x1, + } + submit = Submit() + submit.update_templates(**{'command': 'python rosenbrock.py',}) + label = 'rosenbrock' + return float(trial(cfg, label, tid, Dispatcher, cwd, '../cma', submit)['fx']) + +#data = eval_rosenbrock(1, 1, "x11") + +optimizer = CMA(mean=numpy.zeros(2), sigma=1.0) +for generation in range(3): + solutions = [] + for cand in range(optimizer.population_size): + x = optimizer.ask() + value = eval_rosenbrock(x[0], x[1], "{}_{}".format(cand, generation)) + solutions.append((x, value)) + print(f"#{generation} {value} (x1={x[0]}, x2 = {x[1]})") + optimizer.tell(solutions) diff --git a/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..52fa35e86 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py @@ -0,0 +1,37 @@ +from netpyne.batchtools import specs, comm +import json + +# ---- Rosenbrock Function & Constant Definition ---- # + +""" +The rosenbrock minimum is at (A, A**2), where rosenbrock(A, A**2) = 0 +""" +def rosenbrock(x0, x1): + return 100 * (x1 - x0**2)**2 + (A - x0)**2 + +A = 1 +# --------------------------------------------------- # + +# 
----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' + +cfg.x0 = 1 +cfg.x1 = 1 + +cfg.update_cfg() + +# --------------------------------------------------- # + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x0': cfg.x0, 'x1': cfg.x1, 'fx': rosenbrock(cfg.x0, cfg.x1)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py new file mode 100644 index 000000000..350c99084 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py @@ -0,0 +1,24 @@ +from netpyne.batchtools.search import search +import numpy +x0 = numpy.arange(0, 3) +x1 = x0**2 + +x0_x1 = [*zip(x0.tolist(), x1.tolist())] +params = {'x0_x1': x0_x1 + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'grid', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python rosenbrock.py'}, + num_samples = 1, + metric = 'fx', + mode = 'min', + algorithm = 'variant_generator', + max_concurrent = 3) \ No newline at end of file diff --git a/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..cc957b9d3 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py @@ -0,0 +1,41 @@ +from netpyne.batchtools import specs, comm +import json + +# ---- Rosenbrock Function & Constant Definition ---- # + +""" +The rosenbrock minimum is at (A, A**2), where rosenbrock(A, A**2) = 0 +""" +def rosenbrock(x0, x1): + return 100 * (x1 - x0**2)**2 + (A - x0)**2 + 
+A = 1 +# --------------------------------------------------- # + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' + +cfg.x0_x1 = [1, 1] + +cfg.update_cfg() + +# --------------------------------------------------- # + +# -------------- unpacking x0_x1 list -------------- # +x0, x1 = cfg.x0_x1 +# --------------------------------------------------- # + + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x0': x0, 'x1': x1, 'fx': rosenbrock(x0, x1)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/analysis.py b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/analysis.py new file mode 100644 index 000000000..32914a419 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/analysis.py @@ -0,0 +1,6 @@ +from netpyne.batchtools.analysis import Analyzer + +analyzer = Analyzer(params = ['x.0', 'x.1', 'x.2', 'x.3'], metrics = ['fx']) +analyzer.load_file('optuna.csv') +results = analyzer.run_analysis() + diff --git a/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py new file mode 100644 index 000000000..d6229949d --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py @@ -0,0 +1,24 @@ +from netpyne.batchtools.search import search +import numpy + +params = {'x.0': numpy.linspace(-1, 3, 5), + 'x.1': numpy.linspace(-1, 3, 5), + 'x.2': numpy.linspace(-1, 3, 5), + 'x.3': numpy.linspace(-1, 3, 5), + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'grid', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python 
rosenbrock.py'},
+            num_samples = 1,
+            metric = 'fx',
+            mode = 'min',
+            algorithm = 'variant_generator',
+            max_concurrent = 3)
diff --git a/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py
new file mode 100644
index 000000000..e6ce3b29d
--- /dev/null
+++ b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py
@@ -0,0 +1,49 @@
+from netpyne.batchtools import specs, comm
+import json
+
+# --- Rosenbrock Functions & Constant Definitions --- #
+
+"""
+The rosenbrock_v0 (coupled rosenbrock)
+"""
+
+A = 1
+
+
+def rosenbrock_v0(*args):
+    if len(args) % 2:
+        raise ValueError('rosenbrock_v0 requires an even number of arguments')
+    return sum(100 * (args[i]**2 - args[i+1])**2 + (args[i] - A)**2 for i in range(0, len(args), 2))
+
+
+"""
+The rosenbrock_v1
+"""
+
+
+def rosenbrock_v1(*args):
+    return sum(100 * (args[i+1] - args[i]**2)**2 + (A - args[i])**2 for i in range(0, len(args) - 1))  # stop at len-1: each term pairs (i, i+1), so i+1 must stay in range
+
+
+# --------------------------------------------------- #
+
+# ----------- cfg creation & batch update ----------- #
+
+cfg = specs.SimConfig({'x': [None] * 4})
+
+cfg.simLabel = 'rosenbrock'
+cfg.saveFolder = '.' 
+ +cfg.update_cfg() + +# --------------------------------------------------- # + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x': cfg.x, 'fx': rosenbrock_v0(*cfg.x)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py new file mode 100644 index 000000000..cf35c487d --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py @@ -0,0 +1,23 @@ +from netpyne.batchtools.search import search +import numpy + + +params = {'xn.0': numpy.arange(0, 5), + 'xn.1': numpy.arange(0, 5) + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'grid', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python rosenbrock.py'}, + num_samples = 1, + metric = 'fx', + mode = 'min', + algorithm = 'variant_generator', + max_concurrent = 3) \ No newline at end of file diff --git a/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..b1956b2db --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py @@ -0,0 +1,41 @@ +from netpyne.batchtools import specs, comm +import json + +# ---- Rosenbrock Function & Constant Definition ---- # + +""" +The rosenbrock minimum is at (A, A**2), where rosenbrock(A, A**2) = 0 +""" +def rosenbrock(x0, x1): + return 100 * (x1 - x0**2)**2 + (A - x0)**2 + +A = 1 +# --------------------------------------------------- # + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' 
+ +cfg.xn = [1, 1] + +cfg.update_cfg() + +# --------------------------------------------------- # + +# ---------------- unpacking x list ---------------- # +x0, x1 = cfg.xn +# --------------------------------------------------- # + + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x0': x0, 'x1': x1, 'fx': rosenbrock(x0, x1)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/header.py b/netpyne/batchtools/header.py new file mode 100644 index 000000000..e69de29bb diff --git a/netpyne/batchtools/runners.py b/netpyne/batchtools/runners.py new file mode 100644 index 000000000..8289c8421 --- /dev/null +++ b/netpyne/batchtools/runners.py @@ -0,0 +1,266 @@ +#from batchtk.runtk.utils import convert, set_map, create_script +from batchtk import runtk +from batchtk.runtk.runners import Runner, get_class +import collections +import os +import collections + +def validate(element, container): + try: + match container: + case list(): #container is a list, check validity of index + assert int(element) < len(container) + case dict(): #container is a dictionary, check validity of key + assert element in container + case _: #invalid container type + assert element in container + #raise AttributeError("container type is not supported, cfg attributes support dictionary and " + # "list objects, container {} is of type {}".format(container, type(container))) + except Exception as e: + raise AttributeError("error when validating {} within container {}: {}".format(element, container, e)) + return True #element is valid, return True for boolean + +""" +def set_map(self, assign_path, value, force_match=False): + assigns = assign_path.split('.') + if len(assigns) == 1 and not (force_match and not validate(assigns[0], self)): + self.__setitem__(assigns[0], value) + return + crawler = self.__getitem__(assigns[0]) + for gi in assigns[1:-1]: + if not (force_match and not validate(gi, 
crawler)): + try: + crawler = crawler.__getitem__(gi) + except TypeError: # case for lists. + crawler = crawler.__getitem__(int(gi)) + if not (force_match and not validate(assigns[-1], crawler)): + try: + crawler.__setitem__(assigns[-1], value) + except TypeError: + crawler.__setitem__(int(assigns[-1]), value) + return +""" + + +def traverse(obj, path, force_match=False): + if len(path) == 1: + if not (force_match and not validate(path[0], obj)): + return obj + if not (force_match and not validate(path[0], obj)): + try: + crawler = obj.__getitem__(path[0]) + except TypeError: # use for indexing into a list or in case the dictionary entry? is an int. + crawler = obj.__getitem__(int(path[0])) + return traverse(crawler, path[1:], force_match) + +def set_map(self, assign_path, value, force_match=False): + assigns = assign_path.split('.') + traverse(self, assigns, force_match)[assigns[-1]] = value + +def get_map(self, assign_path, force_match=False): + assigns = assign_path.split('.') + return traverse(self, assigns, force_match)[assigns[-1]] + +def update_items(d, u, force_match = False): + for k, v in u.items(): + try: + force_match and validate(k, d) + if isinstance(v, collections.abc.Container): + d[k] = update_items(d.get(k), v, force_match) + else: + d[k] = v + except Exception as e: + raise AttributeError("Error when calling update_items with force_match, item {} does not exist".format(k)) + return d + +class NetpyneRunner(Runner): + """ + runner for netpyne + see class runner + mappings <- + """ + def __new__(cls, inherit=None, **kwargs): + _super = get_class(inherit) + + def __init__(self, netParams=None, cfg=None, **kwargs): + """ + NetpyneRunner constructor + + Parameters + ---------- + self - NetpyneRunner instance + netParams - optional netParams instance (defaults to None, created with method: get_NetParams) + cfg - optional SimConfig instance (defaults to None, created with method: get_SimConfig) + N.B. requires cfg with the update_cfg method. 
see in get_SimConfig: + self.cfg = type("Runner_SimConfig", (specs.SimConfig,), + {'__mappings__': self.mappings, + 'update_cfg': update_cfg})() + kwargs - Unused + """ + _super.__init__(self, **kwargs) + self.netParams = netParams + self.cfg = cfg + + def _set_inheritance(self, inherit): + """ + Method for changing inheritance of NetpyneRunner + see runtk.RUNNERS + Parameters + ---------- + self + inherit + """ + if inherit in runtk.RUNNERS: + cls = type(self) + cls.__bases__ = (runtk.RUNNERS[inherit],) + else: + raise KeyError("inheritance {} not found in runtk.RUNNERS (please check runtk.RUNNERS for valid strings...".format(inherit)) + + + def get_NetParams(self, netParamsDict=None): + """ + Creates / Returns a NetParams instance + Parameters + ---------- + self + netParamsDict - optional dictionary to create NetParams instance (defaults to None) + - to be called during initial function call only + + Returns + ------- + NetParams instance + + """ + if self.netParams: + return self.netParams + else: + from netpyne import specs + self.netParams = specs.NetParams(netParamsDict) + return self.netParams + + def test_mappings(self, mappings): + """ + Tests mappings for validity + + Parameters + ---------- + mappings - dictionary of mappings to test + + Returns + ------- + bool - True if mappings are valid, False otherwise + """ + for assign_path, value in mappings.items(): + try: + set_map(self, assign_path, value, force_match=True) + print("successfully assigned: cfg.{} with value: {}".format(assign_path, value)) + except Exception as e: + raise Exception("failed on mapping: cfg.{} with value: {}\n{}".format(assign_path, value, e)) + return True + + + def update_cfg(self, simConfigDict=None, force_match=False): #intended to take `cfg` instance as self + """ + Updates the SimConfig instance with mappings to the runner, called from a SimConfig instance + + Parameters + ---------- + self - specs (NetpyneRunner) SimConfig instance + simConfigDict - optional dictionary to 
update SimConfig instance (defaults to None) + - to be called during initial function call only + + Returns + ------- + None (updates SimConfig instance in place) + """ + if simConfigDict: + update_items(self, simConfigDict, force_match) + for assign_path, value in self.__mappings__.items(): + try: + set_map(self, assign_path, value) + except Exception as e: + raise Exception("failed on mapping: cfg.{} with value: {}\n{}".format(assign_path, value, e)) + + def get_SimConfig(self, simConfigDict=None): + """ + Creates / Returns a SimConfig instance + Parameters + ---------- + self - NetpyneRunner instance + simConfigDict - optional dictionary to create NetParams instance (defaults to None) + - to be called during initial function call only + + Returns + ------- + SimConfig instance + """ + if self.cfg: + if simConfigDict: + update_items(self.cfg,simConfigDict, force_match=False) + return self.cfg + else: + from netpyne import specs + self.cfg = type("Runner_SimConfig", (specs.SimConfig,), + {'__mappings__': self.mappings, + 'update_cfg': update_cfg, + 'update': update_cfg, + 'test_mappings': test_mappings})(simConfigDict) + return self.cfg + + def set_SimConfig(self): + """ + updates the SimConfig instance with mappings to the runner, called from a Runner instance + + Parameters + ---------- + self + """ + # assumes values are only in 'cfg' + for assign_path, value in self.mappings.items(): + try: + set_map(self, "cfg.{}".format(assign_path), value) + except Exception as e: + raise Exception("failed on mapping: cfg.{} with value: {}\n{}".format(assign_path, value, e)) + + def set_mappings(self, filter=''): + # arbitrary filter, can work with 'cfg' or 'netParams' + for assign_path, value in self.mappings.items(): + if filter in assign_path: + set_map(self, assign_path, value) + + return type("NetpyneRunner{}".format(str(_super.__name__)), (_super,), + {'__init__': __init__, + '_set_inheritance': _set_inheritance, + 'get_NetParams': get_NetParams, + 'NetParams': 
get_NetParams, + 'SimConfig': get_SimConfig, + 'get_SimConfig': get_SimConfig, + 'set_SimConfig': set_SimConfig, + 'set_mappings': set_mappings, + 'test_mappings': test_mappings})(**kwargs) # need to override __init__ or else will call parent + +# use this test_list to check set_map .... +test_list = { + 'lists_of_dicts': [ + {'a': 0, 'b': 1, 'c': 2}, + {'d': 3, 'e': 4, 'f': 5}, + {'g': 6, 'h': 7, 'i': 8} + ], + 'dict_of_lists': { + 'a': [0, 1, 2], + 'b': [3, 4, 5], + 'c': [6, 7, 8] + }, + 'dict_of_dicts': { + 0: {'a': 0, 'b': 1, 'c': 2}, + 1: {'d': 3, 'e': 4, 'f': 5}, + 2: {'g': 6, 'h': 7, 'i': 8} + } +} + +""" +Test statements +In [3]: set_map(test_list, 'lists_of_dicts.0.a', 'a', force_match = True) +In [4]: set_map(test_list, 'lists_of_dicts.0.a', 0, force_match = True) +In [5]: set_map(test_list, 'lists_of_dicts.0.d', 0, force_match = True) +""" \ No newline at end of file diff --git a/netpyne/batchtools/search.py b/netpyne/batchtools/search.py new file mode 100644 index 000000000..694a19a48 --- /dev/null +++ b/netpyne/batchtools/search.py @@ -0,0 +1,430 @@ +import ray +import pandas +import os +from ray import tune, train +from ray.air import session, RunConfig +from ray.tune.search.basic_variant import BasicVariantGenerator +from ray.tune.search import create_searcher, ConcurrencyLimiter, SEARCH_ALG_IMPORT +from netpyne.batchtools import runtk +from collections import namedtuple +from batchtk.raytk.search import ray_trial, LABEL_POINTER +from batchtk.utils import get_path +import numpy +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + + +choice = tune.choice +grid = tune.grid_search +uniform = tune.uniform + +def ray_optuna_search(dispatcher_constructor: Callable, # constructor for the dispatcher (e.g. INETDispatcher) + submit_constructor: Callable, # constructor for the submit (e.g. 
SHubmitSOCK) + run_config: Dict, # batch configuration, (keyword: string pairs to customize the submit template) + params: Dict, # search space (dictionary of parameter keys: tune search spaces) + label: Optional[str] = 'optuna_search', # label for the search + output_path: Optional[str] = '../batch', # directory for storing generated files + checkpoint_path: Optional[str] = '../ray', # directory for storing checkpoint files + max_concurrent: Optional[int] = 1, # number of concurrent trials to run at one time + batch: Optional[bool] = True, # whether concurrent trials should run synchronously or asynchronously + num_samples: Optional[int] = 1, # number of trials to run + metric: Optional[str|list|tuple] = "loss", # metric to optimize (this should match some key: value pair in the returned data + mode: Optional[str|list|tuple] = "min", # either 'min' or 'max' (whether to minimize or maximize the metric + optuna_config: Optional[dict] = None, # additional configuration for the optuna search algorithm + ) -> namedtuple('Study', ['algo', 'results']): + """ + ray_optuna_search(...) + + Parameters + ---------- + dispatcher_constructor:Callable, # constructor for the dispatcher (e.g. INETDispatcher) + submit_constructor:Callable, # constructor for the submit (e.g. 
SHubmitSOCK) + run_config:Dict, # batch configuration, (keyword: string pairs to customize the submit template) + params:Dict, # search space (dictionary of parameter keys: tune search spaces) + label:Optional[str] = 'optuna_search', # label for the search + output_path:Optional[str] = '../batch', # directory for storing generated files + checkpoint_path:Optional[str] = '../ray', # directory for storing checkpoint files + max_concurrent:Optional[int] = 1, # number of concurrent trials to run at one time + batch:Optional[bool] = True, # whether concurrent trials should run synchronously or asynchronously + num_samples:Optional[int] = 1, # number of trials to run + metric:Optional[str] = "loss", # metric to optimize (this should match some key: value pair in the returned data + mode:Optional[str] = "min", # either 'min' or 'max' (whether to minimize or maximize the metric + optuna_config:Optional[dict] = None, # additional configuration for the optuna search algorithm (incl. sampler, seed, etc.) + + Creates + ------- +