| hhcell (cell) |
| null (0) |
| morphology (morphology) |
| soma (id = 0) |
| null (1) |
| x = 0, y = 0, z = 0, diameter = 17.841242, radius = 8.920621E-6 m, xLength = 0 m, yLength = 0 m, zLength = 0 m |
| null (2) |
| x = 0, y = 0, z = 0, diameter = 17.841242, radius = 8.920621E-6 m, xLength = 0 m, yLength = 0 m, zLength = 0 m |
| soma_group (segmentGroup) |
| null (3) |
| bioPhys1 (biophysicalProperties) |
| null (4) |
| null (5) |
| value = -0.02 V |
| null (6) |
| value = -0.065 V |
| leak (channelDensity) |
| erev = -0.054387000000000005 V, condDensity = 3 kg^-1 m^-4 s^3 A^2 |
| passiveChan (ionChannelPassive) |
| conductance = 1.0E-11 S |
| null (7) |
| naChans (channelDensity) |
| erev = 0.05 V, condDensity = 1200 kg^-1 m^-4 s^3 A^2 |
| naChan (ionChannelHH) |
| conductance = 1.0E-11 S |
| null (8) |
| m (gateHHrates) |
| instances = 3 |
| null (9) |
| rate = 1000 s^-1, midpoint = -0.04 V, scale = 0.01 V |
| null (10) |
| rate = 4000 s^-1, midpoint = -0.065 V, scale = -0.018000000000000002 V |
| h (gateHHrates) |
| instances = 1 |
| null (11) |
| rate = 70 s^-1, midpoint = -0.065 V, scale = -0.02 V |
| null (12) |
| rate = 1000 s^-1, midpoint = -0.035 V, scale = 0.01 V |
| kChans (channelDensity) |
| erev = -0.077 V, condDensity = 360 kg^-1 m^-4 s^3 A^2 |
| kChan (ionChannelHH) |
| conductance = 1.0E-11 S |
| null (13) |
| n (gateHHrates) |
| instances = 4 |
| null (14) |
| rate = 100 s^-1, midpoint = -0.055 V, scale = 0.01 V |
| null (15) |
| rate = 125 s^-1, midpoint = -0.065 V, scale = -0.08 V |
| null (16) |
| value = 0.01 kg^-1 m^-4 s^4 A^2 |
| null (17) |
| null (18) |
| value = 0.3 kg^2 m^2 s^-3 A^-2 |
| cell |
| neuroLexId |
| State vars: v (voltage), spiking |
| initMembPot = biophysicalProperties/membraneProperties/initMembPotential/value |
| thresh = biophysicalProperties/membraneProperties/spikeThresh/value |
| surfaceArea = SUM OF: morphology/segments[*]/surfaceArea |
| totSpecCap = biophysicalProperties/totSpecCap |
| totCap = totSpecCap * surfaceArea |
| iChannels = biophysicalProperties/membraneProperties/totChanCurrent |
| iSyn = SUM OF: synapses[*]/i |
| iCa = biophysicalProperties/membraneProperties/iCa |
| caConc = biophysicalProperties/intracellularProperties/caConc |
| caConcExt = biophysicalProperties/intracellularProperties/caConcExt |
| v' = (iChannels + iSyn) / totCap |
| IF v .gt. thresh AND spiking .lt. 0.5 THEN |
| (spiking = 1) AND (EVENT: spike) |
| IF v .lt. thresh THEN |
| (spiking = 0) |
| Exposures: spiking, iChannels (current), iSyn (current), totSpecCap (specificCapacitance), surfaceArea (area), iCa (current), caConc (concentration), caConcExt (concentration), v (voltage) |
| baseCellMembPot |
| Exposures: v (voltage) |
| baseSpikingCell |
| baseCell |
| baseStandalone |
| notes |
| morphology |
| segment |
| Consts: LEN = 1 m |
| name |
| radDist = distal/radius |
| dx = distal/xLength |
| dy = distal/yLength |
| dz = distal/zLength |
| px = proximal/xLength |
| py = proximal/yLength |
| pz = proximal/zLength |
| length = sqrt(((dx - px) * (dx - px) + (dy - py) * (dy - py) + (dz - pz) * (dz - pz))/(LEN * LEN)) * LEN |
| Exposures: surfaceArea (area), radDist (length), length (length) |
| proximal |
| Consts: MICRON = 1.0E-6 m |
| point3DWithDiam |
| Params: x, y, z, diameter |
| Consts: MICRON = 1.0E-6 m |
| distal |
| Consts: MICRON = 1.0E-6 m |
| point3DWithDiam |
| Params: x, y, z, diameter |
| Consts: MICRON = 1.0E-6 m |
| segmentGroup |
| neuroLexId |
| member |
| segment |
| biophysicalProperties |
| totSpecCap = membraneProperties/totSpecCap |
| Exposures: totSpecCap (specificCapacitance) |
| membraneProperties |
| REQUIRES: surfaceArea (area) |
| totSpecCap = SUM OF: specificCapacitances[*]/specCap |
| totChanPopCurrent = SUM OF: populations[*]/i |
| totChanDensCurrentDensity = SUM OF: channelDensities[*]/iDensity |
| totChanCurrent = totChanPopCurrent + (totChanDensCurrentDensity * surfaceArea) |
| totChanPopCurrentCa = SUM OF: populations[ion='ca']/i |
| totChanDensCurrentDensityCa = SUM OF: channelDensities[ion='ca']/iDensity |
| iCa = totChanPopCurrentCa + (totChanDensCurrentDensityCa * surfaceArea) |
| Exposures: totChanCurrent (current), iCa (current), totSpecCap (specificCapacitance) |
| spikeThresh |
| Params: value (voltage) |
| initMembPotential |
| Params: value (voltage) |
| channelDensity |
| Params: erev (voltage) |
| Consts: vShift = 0 V |
| REQUIRES: v (voltage) |
| segmentGroup |
| ion |
| channelf = ionChannel/fopen |
| gDensity = condDensity * channelf |
| iDensity = gDensity * (erev - v) |
| Exposures: gDensity (conductanceDensity), iDensity (currentDensity) |
| baseChannelDensityCond |
| Params: condDensity (conductanceDensity) |
| REQUIRES: v (voltage) |
| Exposures: gDensity (conductanceDensity), iDensity (currentDensity) |
| baseChannelDensity |
| REQUIRES: v (voltage) |
| Exposures: iDensity (currentDensity) |
| ionChannelPassive |
| REQUIRES: v (voltage) |
| species |
| fopen = 1 |
| g = conductance |
| Exposures: g (conductance), fopen |
| ionChannel |
| REQUIRES: v (voltage) |
| conductanceScale = PRODUCT OF: conductanceScaling[*]/factor |
| fopen0 = PRODUCT OF: gates[*]/fcond |
| fopen = conductanceScale * fopen0 |
| g = conductance * fopen |
| Exposures: g (conductance), fopen |
| ionChannelHH |
| REQUIRES: v (voltage) |
| species |
| conductanceScale = PRODUCT OF: conductanceScaling[*]/factor |
| fopen0 = PRODUCT OF: gates[*]/fcond |
| fopen = conductanceScale * fopen0 |
| g = conductance * fopen |
| Exposures: g (conductance), fopen |
| baseIonChannel |
| Params: conductance (conductance) |
| REQUIRES: v (voltage) |
| neuroLexId |
| Exposures: g (conductance), fopen |
| notes |
| channelDensity |
| Params: erev (voltage) |
| Consts: vShift = 0 V |
| REQUIRES: v (voltage) |
| segmentGroup |
| ion |
| channelf = ionChannel/fopen |
| gDensity = condDensity * channelf |
| iDensity = gDensity * (erev - v) |
| Exposures: gDensity (conductanceDensity), iDensity (currentDensity) |
| baseChannelDensityCond |
| Params: condDensity (conductanceDensity) |
| REQUIRES: v (voltage) |
| Exposures: gDensity (conductanceDensity), iDensity (currentDensity) |
| baseChannelDensity |
| REQUIRES: v (voltage) |
| Exposures: iDensity (currentDensity) |
| ionChannelHH |
| REQUIRES: v (voltage) |
| species |
| conductanceScale = PRODUCT OF: conductanceScaling[*]/factor |
| fopen0 = PRODUCT OF: gates[*]/fcond |
| fopen = conductanceScale * fopen0 |
| g = conductance * fopen |
| Exposures: g (conductance), fopen |
| baseIonChannel |
| Params: conductance (conductance) |
| REQUIRES: v (voltage) |
| neuroLexId |
| Exposures: g (conductance), fopen |
| notes |
| gateHHrates |
| State vars: q |
| rateScale = PRODUCT OF: q10Settings[*]/q10 |
| alpha = forwardRate/r |
| beta = reverseRate/r |
| fcond = q^instances |
| inf = alpha/(alpha+beta) |
| tau = 1/((alpha+beta) * rateScale) |
| q' = (inf - q) / tau |
| Exposures: alpha (per_time), beta (per_time), tau (time), inf, rateScale, fcond, q |
| gate |
| Exposures: fcond, q |
| baseGate |
| Params: instances |
| Exposures: fcond, q |
| HHExpLinearRate |
| REQUIRES: v (voltage) |
| x = (v - midpoint) / scale |
| Exposures: r (per_time) |
| baseHHRate |
| Params: rate (per_time), midpoint (voltage), scale (voltage) |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| baseVoltageDepRate |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| HHExpRate |
| REQUIRES: v (voltage) |
| r = rate * exp((v - midpoint)/scale) |
| Exposures: r (per_time) |
| baseHHRate |
| Params: rate (per_time), midpoint (voltage), scale (voltage) |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| baseVoltageDepRate |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| gateHHrates |
| State vars: q |
| rateScale = PRODUCT OF: q10Settings[*]/q10 |
| alpha = forwardRate/r |
| beta = reverseRate/r |
| fcond = q^instances |
| inf = alpha/(alpha+beta) |
| tau = 1/((alpha+beta) * rateScale) |
| q' = (inf - q) / tau |
| Exposures: alpha (per_time), beta (per_time), tau (time), inf, rateScale, fcond, q |
| gate |
| Exposures: fcond, q |
| baseGate |
| Params: instances |
| Exposures: fcond, q |
| HHExpRate |
| REQUIRES: v (voltage) |
| r = rate * exp((v - midpoint)/scale) |
| Exposures: r (per_time) |
| baseHHRate |
| Params: rate (per_time), midpoint (voltage), scale (voltage) |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| baseVoltageDepRate |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| HHSigmoidRate |
| REQUIRES: v (voltage) |
| r = rate / (1 + exp(0 - (v - midpoint)/scale)) |
| Exposures: r (per_time) |
| baseHHRate |
| Params: rate (per_time), midpoint (voltage), scale (voltage) |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| baseVoltageDepRate |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| channelDensity |
| Params: erev (voltage) |
| Consts: vShift = 0 V |
| REQUIRES: v (voltage) |
| segmentGroup |
| ion |
| channelf = ionChannel/fopen |
| gDensity = condDensity * channelf |
| iDensity = gDensity * (erev - v) |
| Exposures: gDensity (conductanceDensity), iDensity (currentDensity) |
| baseChannelDensityCond |
| Params: condDensity (conductanceDensity) |
| REQUIRES: v (voltage) |
| Exposures: gDensity (conductanceDensity), iDensity (currentDensity) |
| baseChannelDensity |
| REQUIRES: v (voltage) |
| Exposures: iDensity (currentDensity) |
| ionChannelHH |
| REQUIRES: v (voltage) |
| species |
| conductanceScale = PRODUCT OF: conductanceScaling[*]/factor |
| fopen0 = PRODUCT OF: gates[*]/fcond |
| fopen = conductanceScale * fopen0 |
| g = conductance * fopen |
| Exposures: g (conductance), fopen |
| baseIonChannel |
| Params: conductance (conductance) |
| REQUIRES: v (voltage) |
| neuroLexId |
| Exposures: g (conductance), fopen |
| notes |
| gateHHrates |
| State vars: q |
| rateScale = PRODUCT OF: q10Settings[*]/q10 |
| alpha = forwardRate/r |
| beta = reverseRate/r |
| fcond = q^instances |
| inf = alpha/(alpha+beta) |
| tau = 1/((alpha+beta) * rateScale) |
| q' = (inf - q) / tau |
| Exposures: alpha (per_time), beta (per_time), tau (time), inf, rateScale, fcond, q |
| gate |
| Exposures: fcond, q |
| baseGate |
| Params: instances |
| Exposures: fcond, q |
| HHExpLinearRate |
| REQUIRES: v (voltage) |
| x = (v - midpoint) / scale |
| Exposures: r (per_time) |
| baseHHRate |
| Params: rate (per_time), midpoint (voltage), scale (voltage) |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| baseVoltageDepRate |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| HHExpRate |
| REQUIRES: v (voltage) |
| r = rate * exp((v - midpoint)/scale) |
| Exposures: r (per_time) |
| baseHHRate |
| Params: rate (per_time), midpoint (voltage), scale (voltage) |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| baseVoltageDepRate |
| REQUIRES: v (voltage) |
| Exposures: r (per_time) |
| specificCapacitance |
| Params: value (specificCapacitance) |
| segmentGroup |
| specCap = value |
| Exposures: specCap (specificCapacitance) |
| intracellularProperties |
| caConc = SUM OF: speciesList[ion='ca']/concentration |
| caConcExt = SUM OF: speciesList[ion='ca']/extConcentration |
| Exposures: caConc (concentration), caConcExt (concentration) |
| resistivity |
| Params: value (resistivity) |
| segmentGroup |
{}
'.format(key),layout=ipywidgets.Layout(border='solid 1px black',width='60%')) + plt.xlabel("Time (ms)") + plt.ylabel("") + plt.grid(True,linestyle="--") + + #plt.xlim(min(self.nmlOutput["t"]),max(self.nmlOutput["t"])) + #plt.ylim(min(self.nmlOutput[key]),max(self.nmlOutput[key])) + + plt.plot(self.nmlOutput["t"], self.nmlOutput[key], linewidth=1) + + plotBox=ipywidgets.VBox([htmlBox_tittle,fig.canvas]) + + display(plotBox) +#end of class \ No newline at end of file diff --git a/notebooks/NeuroML_Generic_ET/ui_widget.py b/notebooks/NeuroML_Generic_ET/ui_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..b2014cd9f3b43cb5572f7e14f93aa6144227b7e8 --- /dev/null +++ b/notebooks/NeuroML_Generic_ET/ui_widget.py @@ -0,0 +1,33 @@ +import ipywidgets + +#define interactive textboxes for loading source path and LEMS filename +def loadpath(sourcePath,LEMS_file): + return sourcePath,LEMS_file + +sourcePath_tb = ipywidgets.Text(value='../../Tutorial/Source/',placeholder='Path to NeuroML Source Dirctory',description='Path:',disabled=False,layout=ipywidgets.Layout(width='80%')) +LEMS_file_tb = ipywidgets.Text(value='LEMS_HH_Simulation.xml',placeholder='LEMS Filename',description='LEMS:',disabled=False,layout=ipywidgets.Layout(width='80%')) + +header = ipywidgets.HTML(value="Enter Path to NeuroML Model and LEMS filename below: ") +loader = ipywidgets.interactive(loadpath, sourcePath=sourcePath_tb, LEMS_file=LEMS_file_tb) + +#define run button +update_button = ipywidgets.Button(description="Update Model",button_style='info',tooltip='Update NeuroML file with current widget inputs') + +#define run button +run_button = ipywidgets.Button(description="Run NeuroML",button_style='info',tooltip='Execute NeuroML Model with saved inputs') + +#define validate button +validate_button = ipywidgets.Button(description="Validate Model",button_style='warning',tooltip='Validate NeuroML Model for above inputs') + +#arrange run and validate button in a row 
+buttons=ipywidgets.HBox([update_button,validate_button, run_button]) + +#define plot button +plot_button = ipywidgets.Button(description="Plot Output",button_style='success',tooltip='Plot outputs recorded in LEMS file') + + + + + + + diff --git a/notebooks/NeuroML_Generic_libneuro/NeuroML_Notebook_libnml.ipynb b/notebooks/NeuroML_Generic_libneuro/NeuroML_Notebook_libnml.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e63f2f83fbe72aa841804c43716d46df75e731b7 --- /dev/null +++ b/notebooks/NeuroML_Generic_libneuro/NeuroML_Notebook_libnml.ipynb @@ -0,0 +1,145 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "789a1930", + "metadata": {}, + "source": [ + "# Generic Notebook for NeuroML Models" + ] + }, + { + "cell_type": "markdown", + "id": "20c97415", + "metadata": {}, + "source": [ + "## *Source path and filename*" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ce7cc8dd", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "import ipywidgets\n", + "import ui_widget\n", + "from importlib.machinery import SourceFileLoader\n", + "%matplotlib widget\n", + "\n", + "#widget to read input files\n", + "display(ui_widget.header,ui_widget.loader)" + ] + }, + { + "cell_type": "markdown", + "id": "8f2d35a3", + "metadata": {}, + "source": [ + "## *Read NeuroML files and build dashboard*" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "079c703a", + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "# get path and filename from above widget----------------------------------------------------#\n", + "path2source = ui_widget.loader.result[0]\n", + "fname_LEMS = ui_widget.loader.result[1]\n", + "fname_net = ui_widget.loader.result[2]\n", + "\n", + "# imports the python module-----------------------------------------------------------------#\n", + "nmlPython = SourceFileLoader(\"nml2jupyter_ver3.py\",\"nml2jupyter_ver3.py\").load_module()\n", + "runner = 
nmlPython.nml2jupyter(path2source, fname_LEMS, fname_net)\n", + "\n", + "nml_doc=runner.loadnml()\n", + "runner.createTabWithAccordions(nml_doc) #create GUI with tabs (including LEMS) and nested accordions\n", + "#display(runner.createAccordions(nml_doc,'NML Document')) #create only nested accordions\n", + "runner.loadGUI(nml_doc) #load buttons and log/plot window" + ] + }, + { + "cell_type": "markdown", + "id": "aa709aa7", + "metadata": {}, + "source": [ + "## *INFO method output*" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7989feba", + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "#for key,values in nml_doc.cells[0].info(True,'dict').items():\n", + "# if values['members'] is None or (isinstance(values['members'], list) and len(values['members']) == 0): continue\n", + "# print(key,' = ', values['members'])" + ] + }, + { + "cell_type": "markdown", + "id": "219b7931", + "metadata": {}, + "source": [ + "## *Exploring sub-model (examples)*" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "02d4dbdc", + "metadata": {}, + "outputs": [], + "source": [ + "#with first tab as LEMS simulation parameters (default)\n", + "#runner.createTabWithAccordions(nml_doc.networks[0]) #pass NeuroML class object" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "79f7b597", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "#only NeuroML model (no LEMS details included)\n", + "#runner.createAccordions(nml_doc.networks[0],'Networks') #pass NeuroML class object and a title for parent accordion" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": 
"3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/NeuroML_Generic_libneuro/nml2jupyter_ver3.py b/notebooks/NeuroML_Generic_libneuro/nml2jupyter_ver3.py new file mode 100644 index 0000000000000000000000000000000000000000..dc71ad1e5a5722ad21cc360332817d83cca8905e --- /dev/null +++ b/notebooks/NeuroML_Generic_libneuro/nml2jupyter_ver3.py @@ -0,0 +1,240 @@ +import os +import pylab as plt +import numpy as np +import ui_widget +import ipywidgets +from pyneuroml import pynml +from neuroml.loaders import read_neuroml2_file +import neuroml.writers as writers + +#python helper class for updating NeuroML files and running it from Jupyter Notebook +class nml2jupyter(): + + def __init__(self, path2source, fname_LEMS, fname_net): + + self.path2source = path2source + self.fname_LEMS = fname_LEMS + self.fname_net = fname_net + self.nml_file = 'NeuroMLProject.nml' + + #Function to load neuroml file in python object + def loadnml(self): + pathfilename=os.path.join(self.path2source, self.fname_net) + nml_doc= read_neuroml2_file(pathfilename, include_includes=True,already_included=[]) + return nml_doc + + #function to write NeuroML file based on widget inputs + def writeNMLinputFile(self,nml_doc): + writers.NeuroMLWriter.write(nml_doc, self.nml_file) + display("Written in NeuroML2 format to : " + self.nml_file) + + + #Fuction to create accordions for given neuroml object + def createAccordions(self,nmlObj,title): + mydict=nmlObj.info(True,return_format='dict') + emptyKeys=[] + subwidget_list=[] + textBoxList=[] + + #Two loops to keep text boxes on top and accordion on bottom + #Need to find alternate way to avoid looping twice + + #Loop 1 + #create text box widgets for values of dictionary of type str, int or float + #make a list of empty keys + for key,values in mydict.items(): + # check if the member is set to None + # if it's a container (list), it will not be set to None, it + # will be empty, [] + # if it's a scalar, it will be set to None or to a 
non + # container value + if values['members'] is None or (isinstance(values['members'], list) and len(values['members']) == 0): + emptyKeys.append(key) + continue + if isinstance(values['members'],str) or isinstance(values['members'],int) or isinstance(values['members'],float): + textBox_key = ipywidgets.Text(value=key,disabled=True,layout=ipywidgets.Layout(width='20%')) + textBox_value = ipywidgets.Text(value=str(values['members']),layout=ipywidgets.Layout(width='50%')) + textBoxList.append(ipywidgets.HBox([textBox_key, textBox_value])) + if (key=='id'): title_id=values['members'] + + #remove empty keys from dicitonary (to reduce iteration in 2nd loop) + for key in emptyKeys: + mydict.pop(key) + + #Loop 2 + #create sub-accordions for list of values + for key,values in mydict.items(): + if isinstance(values['members'],str) or isinstance(values['members'],int) or isinstance(values['members'],float): continue + if isinstance(values['members'],list): + for idx, val in enumerate(values['members']): + if isinstance(val,str) or isinstance(val,int) or isinstance(val,float): + textBox_key = ipywidgets.Text(value=key,disabled=True,layout=ipywidgets.Layout(width='20%')) + textBox_value = ipywidgets.Text(value=str(val),layout=ipywidgets.Layout(width='50%')) + textBoxList.append(ipywidgets.HBox([textBox_key, textBox_value])) + if (key=='id'): title_id=values['members'] + else: + child_accord=self.createAccordions(val,key) + textBoxList.append(child_accord) + else: + child_accord=self.createAccordions(values['members'],key) + textBoxList.append(child_accord) + + subwidget_list.append(ipywidgets.VBox(textBoxList)) + accordion = ipywidgets.Accordion(children=subwidget_list, selected_index=None) + try: + title_with_id = title + ' (' + title_id + ') ' + accordion.set_title(0, title_with_id) + except: + accordion.set_title(0, title) + + return accordion + + #Function to load LEMS life in python object then get component list to create accordions + def createAccordionsLEMS(self): + 
pathfilename=os.path.join(self.path2source,self.fname_LEMS) + lems_doc=pynml.read_lems_file(pathfilename) + mydict=lems_doc.get_component_list() + accordList=[] + component=['id', 'type', 'parameters', 'parent_id'] + for key,values in mydict.items(): + textBoxList=[] + for attr in component: + val = getattr(mydict[key],attr) + if isinstance(val,dict): + for k,v in val.items(): + textBox_key = ipywidgets.Text(value=k,disabled=True,layout=ipywidgets.Layout(width='20%')) + textBox_value = ipywidgets.Text(value=v,layout=ipywidgets.Layout(width='50%')) + textBoxList.append(ipywidgets.HBox([textBox_key, textBox_value])) + continue + textBox_key = ipywidgets.Text(value=attr,disabled=True,layout=ipywidgets.Layout(width='20%')) + textBox_value = ipywidgets.Text(value=val,layout=ipywidgets.Layout(width='50%')) + textBoxList.append(ipywidgets.HBox([textBox_key, textBox_value])) + accordList.append(ipywidgets.VBox(textBoxList)) + accordion = ipywidgets.Accordion(children=accordList, selected_index=None) + for i,key in enumerate(mydict.keys()): + accordion.set_title(i,key) + return accordion + + #Function to create GUI by nesting accordions with first level of neruoml object as Tabs + def createTabWithAccordions(self,nml_doc): + parent=nml_doc.info(True,return_format='dict') + + masterTab=ipywidgets.Tab() + masterTab_titles=[] + masterTab_child=[] + #create LEMS tab for simulation parameters (using get_component_list) + lemsTab=self.createAccordionsLEMS() + masterTab_child.append(lemsTab) + masterTab_titles.append('LEMS') + for key,values in parent.items(): + if values['members'] is None or (isinstance(values['members'], list) and len(values['members']) == 0): continue #skip empty elements + + sub_child=[] + if isinstance(values['members'],list): + for val in values['members']: + sub_child.append(self.createAccordions(val,key)) + elif isinstance(values['members'],str) or isinstance(values['members'],int) or isinstance(values['members'],float): + 
sub_child.append(ipywidgets.Text(value=str(values['members']))) + else: + sub_child.append(self.createAccordions(values['members'],key)) + + masterTab_child.append(ipywidgets.VBox(sub_child)) + masterTab_titles.append(key) + + masterTab.children=masterTab_child + for i in range(len(masterTab_titles)): + masterTab.set_title(i,masterTab_titles[i]) + + display(masterTab) + + #function to setup full dashboard/gui + def loadGUI(self,nml_doc): + + #function to run NeuroML with given inputs + def runNMLmodel(b): + out_log.clear_output() + out_plot.clear_output() + out_validStatus.clear_output() + with out_log: + display('Running NeuroML Model...') + LEMS_file=os.path.join(self.path2source, self.fname_LEMS) + self.nmlOutput = pynml.run_lems_with_jneuroml(LEMS_file, nogui=True, load_saved_data=True) + #shell_cmd=['pynml', LEMS, LEMSoption] + #subprocess.run(shell_cmd) + display('Completed !!!') + + #function to validate NeuroML model + def validateNMLmodel(b): + out_log.clear_output() + out_validStatus.clear_output() + with out_log: + display('Validating NeuroML Input File...') + #pathfilename=os.path.join(self.path2source, self.nml_file) + checkStatus=pynml.validate_neuroml2(self.nml_file) + #shell_cmd=['pynml', pathfilename,'-validate'] + #subprocess.run(shell_cmd) + if checkStatus==True: + valid_widget=ipywidgets.Valid(value=True,description='') + with out_validStatus: + display(ipywidgets.HBox([ipywidgets.HTML(value=self.nml_file,disabled=True),valid_widget])) + else: + valid_widget=ipywidgets.Valid(value=False,description='') + with out_validStatus: + display(ipywidgets.HBox([ipywidgets.HTML(value=self.nml_file,disabled=True),valid_widget])) + display('Completed !!!') + + #function to display plot in notebook + def plotOutput(b): + out_plot.clear_output() + with out_plot: + self.plotData() + + #function to update NeuroML files from widget inputs + def updateNMLfiles(b): + out_log.clear_output() + out_plot.clear_output() + out_validStatus.clear_output() + with out_log: 
+ #display('Updating NeuroML Files from GUI inputs...') + display('Writing NeuroML python model to file ') + #display(type(self.filelist),len(self.filelist)) + #display(type(self.trees),len(self.trees)) + self.writeNMLinputFile(nml_doc) + display('Completed !!!') + + #output windows + out_log = ipywidgets.Output(layout={'border': '1px solid'}) #for displaying output log from NeuroMl execution + out_plot = ipywidgets.Output() #for displaying plots + out_validStatus = ipywidgets.Output() #for displaying valid widgets after running validate button + + ui_widget.run_button.on_click(runNMLmodel) + ui_widget.validate_button.on_click(validateNMLmodel) + ui_widget.plot_button.on_click(plotOutput) + ui_widget.update_button.on_click(updateNMLfiles) + + display(ui_widget.buttons,out_validStatus,out_log,ui_widget.plot_button,out_plot) + + #function to plot data generated by NeuroML + def plotData(self): + plt.close('all') + for key in self.nmlOutput.keys(): + if key == "t": + continue + plt.ioff() #suppress plot console window (plot only at display call) + fig=plt.figure(figsize=(8,2)) + fig.canvas.header_visible = False + + htmlBox_tittle = ipywidgets.HTML(value='{}
'.format(key),layout=ipywidgets.Layout(border='solid 1px black',width='60%')) + plt.xlabel("Time (ms)") + plt.ylabel("") + plt.grid(True,linestyle="--") + + #plt.xlim(min(self.nmlOutput["t"]),max(self.nmlOutput["t"])) + #plt.ylim(min(self.nmlOutput[key]),max(self.nmlOutput[key])) + + plt.plot(self.nmlOutput["t"], self.nmlOutput[key], linewidth=1) + + plotBox=ipywidgets.VBox([htmlBox_tittle,fig.canvas]) + + display(plotBox) +#end of class \ No newline at end of file diff --git a/notebooks/NeuroML_Generic_libneuro/ui_widget.py b/notebooks/NeuroML_Generic_libneuro/ui_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..04fe329db43aa8c2a77813c13b3a5d7e4dc8591b --- /dev/null +++ b/notebooks/NeuroML_Generic_libneuro/ui_widget.py @@ -0,0 +1,27 @@ +import ipywidgets + +#define interactive textboxes for loading source path and LEMS filename +def loadpath(sourcePath, LEMS_file, net_file): + return sourcePath, LEMS_file, net_file + +sourcePath_tb = ipywidgets.Text(value='../../Tutorial/Source/',placeholder='Path to NeuroML Source Directory',description='Path:',disabled=False,layout=ipywidgets.Layout(width='80%')) +LEMS_file_tb = ipywidgets.Text(value='LEMS_HH_Simulation.xml',placeholder='LEMS Filename',description='LEMS:',disabled=False,layout=ipywidgets.Layout(width='80%')) +network_file_tb = ipywidgets.Text(value='HHCellNetwork.net.nml',placeholder='Network Filename',description='Network:',disabled=False,layout=ipywidgets.Layout(width='80%')) + +header = ipywidgets.HTML(value="Enter Path to NeuroML Model and LEMS filename below: ") +loader = ipywidgets.interactive(loadpath, sourcePath=sourcePath_tb, LEMS_file=LEMS_file_tb, net_file=network_file_tb) + +#define run button +update_button = ipywidgets.Button(description="Update Model",button_style='info',tooltip='Update NeuroML file with current widget inputs') + +#define run button +run_button = ipywidgets.Button(description="Run NeuroML",button_style='info',tooltip='Execute NeuroML Model with saved 
inputs') + +#define validate button +validate_button = ipywidgets.Button(description="Validate Model",button_style='warning',tooltip='Validate NeuroML Model for above inputs') + +#arrange run and validate button in a row +buttons=ipywidgets.HBox([update_button,validate_button, run_button]) + +#define plot button +plot_button = ipywidgets.Button(description="Plot Output",button_style='success',tooltip='Plot outputs recorded in LEMS file') \ No newline at end of file diff --git a/notebooks/NeuroML_HH_version/NeuroML_Notebook_HH.ipynb b/notebooks/NeuroML_HH_version/NeuroML_Notebook_HH.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..3849cb6606b4091d41dfa41a758dd735aec4088c --- /dev/null +++ b/notebooks/NeuroML_HH_version/NeuroML_Notebook_HH.ipynb @@ -0,0 +1,212 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# NeuroML model (HH) in Jupyter Notebook " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "## *Source path and filenames for NeuroML Model*" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import ipywidgets\n", + "import ui_widget\n", + "from importlib.machinery import SourceFileLoader\n", + "%matplotlib widget\n", + "\n", + "#path for source directory to read, write and execute NeuroML--------------------------------------------#\n", + "path2source = \"../../Tutorial/Source/\"\n", + "\n", + "#filenames for NeuroML file under above direcotry--------------------------------------------------------#\n", + "fname_cellNML = \"hhcell.cell.nml\"\n", + "fname_netNML = \"HHCellNetwork.net.nml\"\n", + "fname_LEMS = \"LEMS_HH_Simulation.xml\"\n", + "fname_NML_output = \"hh_forJupyterNotebook.dat\"\n", + "\n", + "# imports the module from the given path-----------------------------------------------------------------#\n", + "nmlPython = 
SourceFileLoader(\"nml2jupyter_ver1.py\",\"nml2jupyter_ver1.py\").load_module()\n", + "runner = nmlPython.nml2jupyter(path2source, fname_cellNML, fname_netNML, fname_LEMS, fname_NML_output)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "## *Functions from python class to interact with NeuroML Model*" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "#function to write inputs for NeuroML--------------------------------------------------------------------#\n", + "def writeInputs(C_m, g_Na, g_K, g_L, E_Na, E_K, E_L, t_0, t_n, delta_t, I_inj_max, I_inj_width, I_inj_trans):\n", + " ui_widget.highlight_slider()\n", + " runner.writeNMLinputFile(C_m, g_Na, g_K, g_L, E_Na, E_K, E_L, t_0, t_n, delta_t, I_inj_max, I_inj_width, I_inj_trans)\n", + " #write nml files using slider inputs\n", + " \n", + "#function to run NeuroML with given inputs and plot results----------------------------------------------#\n", + "def runNMLmodel(b):\n", + " out_log.clear_output()\n", + " out_plot.clear_output()\n", + " #run neuroML (write output in dat file)\n", + " with out_log:\n", + " LEMSoption=\" -nogui\"\n", + " LEMS=os.path.join(path2source, fname_LEMS, LEMSoption)\n", + " !pynml $LEMS\n", + " \n", + " #load dat file and plot\n", + " with out_plot:\n", + " runner.plotData()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "## *Set-up widgets and interact with NeuroML Model via Python Class*" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b76dadc5fd8d4aac8d517b238f523d73", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "VBox(children=(HBox(children=(HTMLMath(value=' Membrane Capacitance, \\\\(\\\\mu{F}/cm^2\\\\)'),)), HBox(chil…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + 
"data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "e4fefa0743aa45058f6486b1f4e0d17e", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Output()" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "3861387971ea44169924387d1abc959c", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Button(button_style='success', description='Run NeuroML', style=ButtonStyle(), tooltip='Execute NeuroML Model …" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6821aaf620de4a5d95907b9ef027537d", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Output(layout=Layout(border='1px solid'))" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "71d8dac020534a6d956ad1b2f944af2f", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Output()" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#define and connect run button for NeuroML-----------------------------------------------------------------#\n", + "ui_widget.run_button.on_click(runNMLmodel)\n", + "out_log = ipywidgets.Output(layout={'border': '1px solid'}) #for displaying output log from NeuroMl execution\n", + "out_plot = ipywidgets.Output() #for displaying plots \n", + "\n", + "#create input widget and call writeInputs to generate/update NeuroML files---------------------------------#\n", + "wid_inputs=ipywidgets.interactive_output(writeInputs,{'C_m':ui_widget.slider_capacitance,\n", + " 'g_Na':ui_widget.slider_cond_Na, 'g_K':ui_widget.slider_cond_K, 'g_L':ui_widget.slider_cond_L, \n", + " 'E_Na':ui_widget.slider_pot_Na, 'E_K':ui_widget.slider_pot_K, 'E_L':ui_widget.slider_pot_L,\n", + " 't_0':ui_widget.time_start, 
import os
import re
# NOTE: pylab/numpy are imported lazily inside plotData() so that the
# file-rewriting API works on installations without matplotlib.


#python helper class for updating NeuroML files and running it from Jupyter Notebook
class nml2jupyter():
    """Bridge between Jupyter widget values and a NeuroML/LEMS model on disk.

    The class rewrites numeric attribute values inside existing NeuroML
    (cell + network) files and the LEMS simulation file via regex
    search/replace, and plots the .dat output produced by the simulation.
    """

    def __init__(self, path2source, fname_cellNML, fname_netNML, fname_LEMS, fname_NML_output):
        """Remember the model directory and the individual file names."""
        self.path2source = path2source
        self.fname_cellNML = fname_cellNML        # cell biophysics NML file
        self.fname_netNML = fname_netNML          # network / pulse-generator NML file
        self.fname_LEMS = fname_LEMS              # LEMS simulation description
        self.fname_NML_output = fname_NML_output  # .dat file written by the simulation

    def _rewrite_file(self, fname, substitutions):
        """Apply regex substitutions line by line and atomically replace the file.

        fname         -- file name relative to self.path2source
        substitutions -- iterable of (pattern, replacement) pairs for re.sub

        The context managers guarantee both handles are closed even if an
        exception occurs mid-rewrite (the previous version leaked the open
        temp file on error), and os.replace() performs an atomic overwrite
        on every platform, replacing the old swallow-all
        try: os.rename / except: remove-then-rename fallback.
        """
        filename = os.path.join(self.path2source, fname)
        tempname = filename + ".temp"
        with open(filename) as src, open(tempname, "w") as dst:
            for line in src:  # stream line by line instead of readlines()
                for pattern, repl in substitutions:
                    line = re.sub(pattern, repl, line)
                dst.write(line)
        os.replace(tempname, filename)

    #function to update the existing NeuroML/LEMS files based on widget inputs
    def writeNMLinputFile(self, C_m, g_Na, g_K, g_L, E_Na, E_K, E_L, t_0, t_n, delta_t, I_inj_max, I_inj_width, I_inj_trans):
        r"""Write the slider values into the cell, network and LEMS files.

        t_0 is accepted for interface symmetry with the widgets but is not
        written anywhere (simulations always start at 0 ms).

        RegEx building blocks used below:

        [\s]                   - handles blank space around numbers
        ( )                    - capturing group (for group numbering)
        (?:)                   - non-capturing group (no group number)
        ([-+]?(?:\d*[.])?\d+)  - captures floats and integers
                                 (known limitation: a trailing period with
                                 no digits after it, e.g. "25.", won't match)
        """
        #update CELL NML-----------------------------------------------------------------#
        pattern1 = r'(specificCapacitance[\s]+value[\s]*=[\s]*"[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*uF_per_cm2[\s]*")'  #handles white spaces everywhere
        pattern2 = r'(channelDensity id="naChans" ionChannel="naChan" condDensity="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*mS_per_cm2" erev="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*mV" ion="na")'
        pattern3 = r'(channelDensity id="kChans" ionChannel="kChan" condDensity="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*mS_per_cm2" erev="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*mV" ion="k")'
        pattern4 = r'(channelDensity id="leak" ionChannel="passiveChan" condDensity="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*mS_per_cm2" erev="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*mV" ion="non_specific")'
        self._rewrite_file(self.fname_cellNML, [
            (pattern1, r'\g<1>%s\g<3>' % C_m),
            (pattern2, r'\g<1>%s\g<3>%s\g<5>' % (g_Na, E_Na)),
            (pattern3, r'\g<1>%s\g<3>%s\g<5>' % (g_K, E_K)),
            (pattern4, r'\g<1>%s\g<3>%s\g<5>' % (g_L, E_L)),
        ])

        #update NETWORK NML--------------------------------------------------------------#
        pulse1 = r'(pulseGenerator id="pulseGen1" delay="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*ms" duration="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*ms" amplitude="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*nA")'
        pulse2 = r'(pulseGenerator id="pulseGen2" delay="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*ms" duration="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*ms" amplitude="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*nA")'
        self._rewrite_file(self.fname_netNML, [
            (pulse1, r'\g<1>%s\g<3>%s\g<5>%s\g<7>' % (I_inj_trans, I_inj_width, I_inj_max)),
            #second pulse is forced to zero amplitude (switched off)
            (pulse2, r'\g<1>%s\g<3>%s\g<5>%s\g<7>' % (300, 100, 0.0)),
        ])

        #update LEMS---------------------------------------------------------------------#
        sim = r'(Simulation id="sim1" length="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*ms" step="[\s]*)([-+]?(?:\d*[.])?\d+)([\s]*ms" target="HHCellNetwork")'
        self._rewrite_file(self.fname_LEMS, [
            (sim, r'\g<1>%s\g<3>%s\g<5>' % (t_n, delta_t)),
        ])

    #function to plot data generated by NeuroML
    def plotData(self):
        """Load the simulation .dat output and draw a 4-panel summary figure.

        Expects self.fname_NML_output to contain at least 10 whitespace-
        separated columns: t, V, m, h, n, iNa, iK, iL, iInj1, iInj2
        (SI units as written by the LEMS output file).
        """
        # Lazy imports: only the plotting path needs matplotlib/numpy.
        import numpy as np
        import pylab as plt

        #read data file and import columns as array using numpy
        data = np.loadtxt(self.fname_NML_output)
        t = data[:, 0] * 1000        # s  -> ms
        V = data[:, 1] * 1000        # V  -> mV
        m = data[:, 2]
        h = data[:, 3]
        n = data[:, 4]
        ina = data[:, 5]
        ik = data[:, 6]
        il = data[:, 7]
        i_inj1 = data[:, 8] * 10**9  # A -> nA
        i_inj2 = data[:, 9] * 10**9  # A -> nA

        plt.ioff()  #suppress plot console window (plot only at display call)
        fig = plt.figure()
        fig.canvas.header_visible = False

        ax1 = plt.subplot(4, 1, 1)
        plt.xlim([np.min(t), np.max(t)])  #shared x-range for all subplots (via sharex)
        plt.title('Hodgkin-Huxley Neuron')
        plt.plot(t, i_inj1, 'k')
        plt.plot(t, i_inj2, 'b')
        plt.ylabel('$I_{inj}$ (nA)')

        plt.subplot(4, 1, 2, sharex=ax1)
        plt.plot(t, ina, 'c', label='$I_{Na}$')
        plt.plot(t, ik, 'y', label='$I_{K}$')
        plt.plot(t, il, 'm', label='$I_{L}$')
        plt.ylabel('Current')
        plt.legend(loc="upper right")

        plt.subplot(4, 1, 3, sharex=ax1)
        plt.plot(t, m, 'r', label='m')
        plt.plot(t, h, 'g', label='h')
        plt.plot(t, n, 'b', label='n')
        plt.ylabel('Gating Value')
        plt.legend(loc="upper right")

        plt.subplot(4, 1, 4, sharex=ax1)
        plt.plot(t, V, 'k')
        plt.ylabel('V (mV)')
        plt.xlabel('t (ms)')

        plt.tight_layout()
        plt.show()
#end of class
default_t0 + time_end.value = default_tn + time_step.value = default_deltat + slider_amplitude.value = default_amplitude + slider_width.value = default_width + slider_translation.value = default_translation + +def showDefault(response): + if showValue_togglebtn.value: + defalultValues.layout.display = '' + else: + defalultValues.layout.display = 'none' + +#function to change slider handle colour when move from default +def highlight_slider(): + inputList = [slider_capacitance, slider_cond_Na, slider_cond_K, slider_cond_L, slider_pot_Na, slider_pot_K, slider_pot_L, slider_amplitude, slider_width, slider_translation] + inputDefault = [default_capacitance, default_cond_Na, default_cond_K, default_cond_L, default_pot_Na, default_pot_K, default_pot_L, default_amplitude, default_width, default_translation] + for l, d in zip(inputList,inputDefault): + if l.value == d: + l.style.handle_color = 'white' + else: + l.style.handle_color = 'orange' + +#defining the widgets +#Header or texts as HTMLMath to include symbols +header_capacitance = ipywidgets.HTMLMath(value=r" Membrane Capacitance, \(\mu{F}/cm^2\)") +header_conductance = ipywidgets.HTMLMath(value=r" Maximum Conductances, \(mS/cm^2\)") +header_potential = ipywidgets.HTMLMath(value=r" Nernst Reverasal Potentials, \(mV\)") +header_simTime = ipywidgets.HTMLMath(value=r" Simulation Time, \(ms\)") +#header_injCurrent = ipywidgets.HTMLMath(value=r" Injection Current, \(\mu{A}/cm^2\)") #Python version +header_injCurrent = ipywidgets.HTMLMath(value=r" Injection Current, \(nA\)") #NeuroML version +#injCurrent_note = ipywidgets.HTML(value=f"*For injection current duration = 0, the model uses default pulse signal from tutorial") + +#slider widgets +slider_capacitance = ipywidgets.FloatSlider(value=default_capacitance,min=0,max=3,step=0.1,description='Capacitance',readout=False,continuous_update=False) +slider_cond_Na = 
ipywidgets.FloatSlider(value=default_cond_Na,min=0,max=160,step=0.1,description='Sodium',readout=False,continuous_update=False) +slider_cond_K = ipywidgets.FloatSlider(value=default_cond_K,min=0,max=80,step=0.1,description='Potassium',readout=False,continuous_update=False) +slider_cond_L = ipywidgets.FloatSlider(value=default_cond_L,min=0,max=1,step=0.1,description='Leak',readout=False,continuous_update=False) +slider_pot_Na = ipywidgets.FloatSlider(value=default_pot_Na,min=-100,max=100,step=0.1,description='Sodium',readout=False,continuous_update=False) +slider_pot_K = ipywidgets.FloatSlider(value=default_pot_K,min=-100,max=100,step=0.1,description='Potassium',readout=False,continuous_update=False) +slider_pot_L = ipywidgets.FloatSlider(value=default_pot_L,min=-100,max=100,step=0.1,description='Leak',readout=False,continuous_update=False) +slider_amplitude = ipywidgets.FloatSlider(value=default_amplitude,min=-20,max=200,step=0.1,description='Amplitude',readout=False,continuous_update=False) +slider_width = ipywidgets.FloatSlider(value=default_width,min=0,max=500,step=0.1,description='Duration',readout=False,continuous_update=False) +slider_translation = ipywidgets.FloatSlider(value=default_translation,min=0,max=250,step=0.1,description='Time Delay',readout=False,continuous_update=False) + +#text box widgets +time_start = ipywidgets.FloatText(value=default_t0,description='Start Time',disabled=True) +time_end = ipywidgets.FloatText(value=default_tn,description='Total Time',disabled=False) +time_step = ipywidgets.FloatText(value=default_deltat,description='Time Step',disabled=False) + +#text box widgets to link with sliders (included to type in values for slider inputs also) +textBox_capacitance = ipywidgets.FloatText(value=default_capacitance,step=0.1,layout=ipywidgets.Layout(width='5%')) +textBox_cond_Na = ipywidgets.FloatText(value=default_cond_Na,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_cond_K = 
ipywidgets.FloatText(value=default_cond_K,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_cond_L = ipywidgets.FloatText(value=default_cond_L,step=0.1,layout=ipywidgets.Layout(width='5%')) +textBox_pot_Na = ipywidgets.FloatText(value=default_pot_Na,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_pot_K = ipywidgets.FloatText(value=default_pot_K,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_pot_L = ipywidgets.FloatText(value=default_pot_L,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_amplitude = ipywidgets.FloatText(value=default_amplitude,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_width = ipywidgets.FloatText(value=default_width,step=1,layout=ipywidgets.Layout(width='5%')) +textBox_translation = ipywidgets.FloatText(value=default_translation,step=1,layout=ipywidgets.Layout(width='5%')) + +#linking sliders and textbox for values +link_capacitance = link((slider_capacitance, 'value'), (textBox_capacitance, 'value')) +link_cond_Na = link((slider_cond_Na, 'value'), (textBox_cond_Na, 'value')) +link_cond_K = link((slider_cond_K, 'value'), (textBox_cond_K, 'value')) +link_cond_L = link((slider_cond_L, 'value'), (textBox_cond_L, 'value')) +link_pot_Na = link((slider_pot_Na, 'value'), (textBox_pot_Na, 'value')) +link_pot_K = link((slider_pot_K, 'value'), (textBox_pot_K, 'value')) +link_pot_L = link((slider_pot_L, 'value'), (textBox_pot_L, 'value')) +link_amplitude = link((slider_amplitude, 'value'), (textBox_amplitude, 'value')) +link_width = link((slider_width, 'value'), (textBox_width, 'value')) +link_translation = link((slider_translation, 'value'), (textBox_translation, 'value')) + +#define reset button and connect to fucntion call +reset_button = ipywidgets.Button(description="Reset All",button_style='warning',tooltip='Reset to default values for all user inputs') +reset_button.on_click(resetTodefault) + +#define toggle button and connect to fucntion call +showValue_togglebtn = 
ipywidgets.ToggleButton(value=False,description='Default Values',disabled=False,button_style='info',tooltip='Show/Hide default value below') # 'success', 'info', 'warning', 'danger' or '' +showValue_togglebtn.observe(showDefault) +defalultValues = ipywidgets.HTMLMath(value=r"\(C = 1.0\)