{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "{1108: 0, 1109: 1, 1110: 2, 1111: 3, 1113: 4, 1115: 6, 1117: 5, 1119: 7}\n", "/home/jovyan/mnt/neuronunit/neuronunit/optimization/../../neuronunit/__init__.py\n" ] } ], "source": [ "\n", "#Assumption that this file was executed after first executing the bash: ipcluster start -n 8 --profile=default &\n", "import sys\n", "import os\n", "import ipyparallel as ipp\n", "from ipyparallel import depend, require, dependent\n", "\n", "rc = ipp.Client(profile='default');\n", "THIS_DIR = os.path.dirname(os.path.realpath('nsga_parallel.py'))\n", "this_nu = os.path.join(THIS_DIR,'../../')\n", "sys.path.insert(0,this_nu)\n", "#from neuronunit import tests\n", "#rc[:].use_cloudpickle()\n", "inv_pid_map = {}\n", "dview = rc[:]\n", "# lview = rc.load_balanced_view()\n", "ar = rc[:].apply_async(os.getpid)\n", "pids = ar.get_dict()\n", "inv_pid_map = pids\n", "pid_map = {}\n", "\n", "# Map PIDs onto unique numeric global identifiers via a dedicated dictionary\n", "# The pid map can be used to make file names that are uniquely identify ranks, \n", "# in a way that has continuality between sessions\n", "# Unlike Process numbers.\n", "for k,v in inv_pid_map.items():\n", " pid_map[v] = k\n", " \n", "print(pid_map)\n", "import neuronunit\n", "\n", "print(neuronunit.__file__)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Getting Rheobase cached data value for from AIBS dataset 354190013\n", "{'value': array(130.0) * pA}\n", "attempting to recover from pickled file\n", "Getting Rheobase cached data value for from AIBS dataset 354190013\n", "{'value': array(130.0) * pA}\n", "attempting to recover from pickled file\n", "/home/jovyan/mnt/neuronunit/neuronunit/optimization/../../neuronunit/__init__.py\n" ] } ], "source": [ "pid_map\n", "import get_neab\n", "\n", "\n", "def p_imports():\n", " import os\n", "\n", " THIS_DIR = os.path.dirname(os.path.realpath('nsga_parallel.py'))\n", " this_nu = os.path.join(THIS_DIR,'../../')\n", " import sys\n", " sys.path.insert(0,this_nu)\n", " import neuronunit\n", " print(neuronunit.__file__)\n", " import os,sys\n", " \n", "import numpy as np\n", "import matplotlib as matplotlib\n", "matplotlib.use('agg',warn=False)\n", "import quantities as pq\n", "import sciunit\n", "\n", "#Over ride any neuron units in the PYTHON_PATH with this one.\n", "#only appropriate for development.\n", "THIS_DIR = os.path.dirname(os.path.realpath('nsga_parallel.py'))\n", "this_nu = os.path.join(THIS_DIR,'../..')\n", "sys.path.insert(0,this_nu)\n", "\n", "import neuronunit\n", "from neuronunit import aibs\n", "import pdb\n", "import pickle\n", "from scoop import futures\n", "from scoop import utils\n", "try:\n", " IZHIKEVICH_PATH = os.path.join(os.getcwd(),'NeuroML2')\n", " assert os.path.isdir(IZHIKEVICH_PATH)\n", "except AssertionError:\n", " # Replace this with the path to your Izhikevich NeuroML2 directory.\n", " IZHIKEVICH_PATH = os.path.join(THIS_DIR,'NeuroML2')\n", "\n", "LEMS_MODEL_PATH = os.path.join(IZHIKEVICH_PATH,'LEMS_2007One.xml')\n", "import time\n", "from pyneuroml import pynml\n", "import quantities as pq\n", "from neuronunit import tests as nu_tests, neuroelectro\n", "neuron = {'nlex_id': 'nifext_50'} # Layer V pyramidal cell\n", "tests = []\n", "\n", "dataset_id = 354190013 # Internal ID that AIBS uses for a particular Scnn1a-Tg2-Cre\n", " # Primary 
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Getting Rheobase cached data value for from AIBS dataset 354190013\n",
      "{'value': array(130.0) * pA}\n",
      "attempting to recover from pickled file\n",
      "Getting Rheobase cached data value for from AIBS dataset 354190013\n",
      "{'value': array(130.0) * pA}\n",
      "attempting to recover from pickled file\n",
      "/home/jovyan/mnt/neuronunit/neuronunit/optimization/../../neuronunit/__init__.py\n"
     ]
    }
   ],
   "source": [
    "pid_map\n",
    "import get_neab\n",
    "\n",
    "\n",
    "def p_imports():\n",
    "    import os\n",
    "    import sys\n",
    "\n",
    "    THIS_DIR = os.path.dirname(os.path.realpath('nsga_parallel.py'))\n",
    "    this_nu = os.path.join(THIS_DIR,'../../')\n",
    "    sys.path.insert(0,this_nu)\n",
    "    import neuronunit\n",
    "    print(neuronunit.__file__)\n",
    "\n",
    "import numpy as np\n",
    "import matplotlib as matplotlib\n",
    "matplotlib.use('agg',warn=False)\n",
    "import quantities as pq\n",
    "import sciunit\n",
    "\n",
    "# Override any neuronunit already on the PYTHONPATH with this local copy.\n",
    "# Only appropriate for development.\n",
    "THIS_DIR = os.path.dirname(os.path.realpath('nsga_parallel.py'))\n",
    "this_nu = os.path.join(THIS_DIR,'../..')\n",
    "sys.path.insert(0,this_nu)\n",
    "\n",
    "import neuronunit\n",
    "from neuronunit import aibs\n",
    "import pdb\n",
    "import pickle\n",
    "from scoop import futures\n",
    "from scoop import utils\n",
    "try:\n",
    "    IZHIKEVICH_PATH = os.path.join(os.getcwd(),'NeuroML2')\n",
    "    assert os.path.isdir(IZHIKEVICH_PATH)\n",
    "except AssertionError:\n",
    "    # Replace this with the path to your Izhikevich NeuroML2 directory.\n",
    "    IZHIKEVICH_PATH = os.path.join(THIS_DIR,'NeuroML2')\n",
    "\n",
    "LEMS_MODEL_PATH = os.path.join(IZHIKEVICH_PATH,'LEMS_2007One.xml')\n",
    "import time\n",
    "from pyneuroml import pynml\n",
    "from neuronunit import tests as nu_tests, neuroelectro\n",
    "neuron = {'nlex_id': 'nifext_50'} # Layer V pyramidal cell\n",
    "tests = []\n",
    "\n",
    "dataset_id = 354190013 # Internal ID that AIBS uses for a particular Scnn1a-Tg2-Cre\n",
    "                       # primary visual area, layer 5 neuron.\n",
    "observation = aibs.get_observation(dataset_id,'rheobase')\n",
    "print(observation)\n",
    "ne_pickle = os.path.join(THIS_DIR,\"neuroelectro.pickle\")\n",
    "\n",
    "if os.path.isfile(ne_pickle):\n",
    "    print('attempting to recover from pickled file')\n",
    "    with open(ne_pickle, 'rb') as f:\n",
    "        tests = pickle.load(f)\n",
    "else:\n",
    "    print('Checked path %s and no pickled file found. Commencing time-intensive download.' % ne_pickle)\n",
    "    tests += [nu_tests.RheobaseTest(observation=observation)]\n",
    "    test_class_params = [(nu_tests.InputResistanceTest,None),\n",
    "                         (nu_tests.TimeConstantTest,None),\n",
    "                         (nu_tests.CapacitanceTest,None),\n",
    "                         (nu_tests.RestingPotentialTest,None),\n",
    "                         (nu_tests.InjectedCurrentAPWidthTest,None),\n",
    "                         (nu_tests.InjectedCurrentAPAmplitudeTest,None),\n",
    "                         (nu_tests.InjectedCurrentAPThresholdTest,None)]\n",
    "\n",
    "    for cls,params in test_class_params:\n",
    "        # Use of the variable 'neuron' in this context shadows the module name 'neuron'.\n",
    "        # At the moment it doesn't seem to matter, since neuron is encapsulated in a\n",
    "        # class, but this could cause problems in the future.\n",
    "        observation = cls.neuroelectro_summary_observation(neuron)\n",
    "        tests += [cls(observation,params=params)]\n",
    "\n",
    "    with open(ne_pickle, 'wb') as handle:\n",
    "        pickle.dump(tests, handle)\n",
    "\n",
    "def update_amplitude(test,tests,score):\n",
    "    rheobase = score.prediction['value'] # First find a value for rheobase,\n",
    "    # then proceed with optimizing the other parameters.\n",
    "\n",
    "    for i in [tests[-3],tests[-2],tests[-1]]:\n",
    "        # Set current injection to just suprathreshold.\n",
    "        i.params['injected_square_current']['amplitude'] = rheobase*1.01\n",
    "\n",
    "# Don't repeat the rheobase test here. It is a serial bottleneck that must occur before\n",
    "# any parallel optimization, because the optimization routine must have a priori\n",
    "# knowledge of the suprathreshold current injection values for each model.\n",
    "hooks = {tests[0]:{'f':update_amplitude}} # This is a trick to dynamically insert the method\n",
    "# update_amplitude at the location in sciunit where it is passed to, without any loss of generality.\n",
    "suite = sciunit.TestSuite(\"vm_suite\",tests,hooks=hooks)\n",
    "\n",
    "dview.apply_sync(p_imports)\n",
    "p_imports()\n",
    "from deap import base\n",
    "from deap import creator\n",
    "from deap import tools\n",
    "toolbox = base.Toolbox()\n"
   ]
  },
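  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch of the arithmetic behind the update_amplitude hook above: once\n",
    "# a rheobase prediction exists, the three AP-shape tests are driven at a\n",
    "# just-suprathreshold current, 1% above rheobase. The value here is hypothetical.\n",
    "import quantities as pq\n",
    "rheobase_guess = 130.0 * pq.pA          # hypothetical rheobase prediction\n",
    "suprathreshold = rheobase_guess * 1.01  # as in update_amplitude\n",
    "print(suprathreshold)                   # -> 131.3 pA\n"
   ]
  },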
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "importing matplotlib on engine(s)\n",
      "importing neuronunit on engine(s)\n",
      "importing model_parameters from neuronunit.optimization on engine(s)\n",
      "importing pdb on engine(s)\n",
      "importing array on engine(s)\n",
      "importing random on engine(s)\n",
      "importing sys on engine(s)\n",
      "importing numpy on engine(s)\n",
      "importing matplotlib.pyplot on engine(s)\n",
      "importing quantities on engine(s)\n",
      "importing algorithms from deap on engine(s)\n",
      "importing base from deap on engine(s)\n",
      "importing diversity,convergence,hypervolume from deap.benchmarks.tools on engine(s)\n",
      "importing creator from deap on engine(s)\n",
      "importing tools from deap on engine(s)\n",
      "importing os on engine(s)\n",
      "importing os.path on engine(s)\n",
      "importing deap on engine(s)\n",
      "importing neuronunit.capabilities on engine(s)\n"
     ]
    }
   ],
   "source": [
    "\n",
    "with dview.sync_imports(): # Causes each of these to be imported on the workers as well as here.\n",
    "    #import get_neab\n",
    "    import matplotlib\n",
    "    import neuronunit\n",
    "    from neuronunit.optimization import model_parameters as modelp\n",
    "\n",
    "    matplotlib.use('Agg') # Need to do this before importing neuronunit on a Mac,\n",
    "    # because the OSX backend won't work on the worker threads.\n",
    "    import pdb\n",
    "    import array\n",
    "    import random\n",
    "    import sys\n",
    "\n",
    "    import numpy as np\n",
    "    import matplotlib.pyplot as plt\n",
    "    import quantities as pq\n",
    "    from deap import algorithms\n",
    "    from deap import base\n",
    "    from deap.benchmarks.tools import diversity, convergence, hypervolume\n",
    "    from deap import creator\n",
    "    from deap import tools\n",
    "\n",
    "    import os\n",
    "    import os.path\n",
    "\n",
    "    import deap as deap\n",
    "\n",
    "    import neuronunit.capabilities as cap\n",
    "    history = tools.History()\n",
    "\n",
    "\n",
    "def p_imports():\n",
    "\n",
    "    from neuronunit.models import backends\n",
    "    from neuronunit.models.reduced import ReducedModel\n",
    "    try:\n",
    "        IZHIKEVICH_PATH = os.path.join(os.getcwd(),'NeuroML2')\n",
    "        assert os.path.isdir(IZHIKEVICH_PATH)\n",
    "    except AssertionError:\n",
    "        # Replace this with the path to your Izhikevich NeuroML2 directory.\n",
    "        IZHIKEVICH_PATH = os.path.join(THIS_DIR,'NeuroML2')\n",
    "\n",
    "    LEMS_MODEL_PATH = os.path.join(IZHIKEVICH_PATH,'LEMS_2007One.xml')\n",
    "    # Give each worker its own copy of the model file, suffixed by its rank.\n",
    "    new_file_path = str(LEMS_MODEL_PATH)+str(pid_map[int(os.getpid())])\n",
    "    os.system('cp ' + str(LEMS_MODEL_PATH)+str(' ') + new_file_path)\n",
    "    model = ReducedModel(new_file_path,name='vanilla',backend='NEURON')\n",
    "    #model = ReducedModel(get_neab.LEMS_MODEL_PATH,name='vanilla',backend='NEURON')\n",
    "    model.load_model()\n",
    "    return\n",
    "\n",
    "#dview.apply_sync(p_imports)\n",
    "#p_imports()\n",
    "from deap import base\n",
    "from deap import creator\n",
    "toolbox = base.Toolbox()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "class Individual(list):\n",
    "    '''\n",
    "    Instances of this class are used by DEAP as individuals, i.e. one unit of\n",
    "    chromosome (a parameter vector). Extends list, so gene values can be indexed\n",
    "    directly, while carrying extra bookkeeping attributes.\n",
    "    '''\n",
    "    def __init__(self, *args):\n",
    "        list.__init__(self, *args)\n",
    "        self.error=None\n",
    "        self.results=None\n",
    "        self.name=''\n",
    "        self.attrs = {}\n",
    "        self.params=None\n",
    "        self.score=None\n",
    "        self.lookup={}\n",
    "        self.rheobase=None\n",
    "        self.fitness = creator.FitnessMin()\n",
    "\n",
    "with dview.sync_imports():\n",
    "    # One weight per test in the suite (8 tests: rheobase plus seven others),\n",
    "    # all minimized. This must match the length of the error tuple returned\n",
    "    # by the evaluation function below.\n",
    "    creator.create(\"FitnessMin\", base.Fitness, weights=(-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0))\n",
    "    creator.create(\"Individual\", list, fitness=creator.FitnessMin)\n"
   ]
  },
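  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal, self-contained sketch of the DEAP creator pattern used above:\n",
    "# creator.create builds new classes at run time; an Individual is a list with an\n",
    "# attached multi-objective fitness, and all-negative weights mean every objective\n",
    "# is minimized. The Demo* names and values here are hypothetical.\n",
    "from deap import base, creator\n",
    "if not hasattr(creator, \"DemoFitness\"):            # guard against re-running the cell\n",
    "    creator.create(\"DemoFitness\", base.Fitness, weights=(-1.0, -1.0))\n",
    "    creator.create(\"DemoIndividual\", list, fitness=creator.DemoFitness)\n",
    "demo_ind = creator.DemoIndividual([0.02, 0.2])\n",
    "demo_ind.fitness.values = (1.5, 0.3)               # hypothetical error values\n",
    "print(demo_ind, demo_ind.fitness.valid)\n"
   ]
  },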
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "\n",
    "def p_imports():\n",
    "    toolbox = base.Toolbox()\n",
    "    from neuronunit.optimization import model_parameters as modelp\n",
    "    import numpy as np\n",
    "    BOUND_LOW = [ np.min(i) for i in modelp.model_params.values() ]\n",
    "    BOUND_UP = [ np.max(i) for i in modelp.model_params.values() ]\n",
    "    NDIM = len(BOUND_UP)\n",
    "    def uniform(low, up, size=None):\n",
    "        try:\n",
    "            return [random.uniform(a, b) for a, b in zip(low, up)]\n",
    "        except TypeError:\n",
    "            return [random.uniform(a, b) for a, b in zip([low] * size, [up] * size)]\n",
    "    # One weight per test in the suite (8 tests), all minimized.\n",
    "    creator.create(\"FitnessMin\", base.Fitness, weights=(-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0))\n",
    "    creator.create(\"Individual\", list, fitness=creator.FitnessMin)\n",
    "    toolbox.register(\"attr_float\", uniform, BOUND_LOW, BOUND_UP, NDIM)\n",
    "    toolbox.register(\"Individual\", tools.initIterate, creator.Individual, toolbox.attr_float)\n",
    "    toolbox.register(\"population\", tools.initRepeat, list, toolbox.Individual)\n",
    "    toolbox.register(\"select\", tools.selNSGA2)\n",
    "    return\n",
    "dview.apply_sync(p_imports)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "tests"
   ]
  },
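  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch of the gene -> parameter-dictionary transform performed by\n",
    "# update_vm_pop in the next cell: trans_dict maps gene index -> parameter name,\n",
    "# so a flat list of floats can be rehydrated into named model attributes.\n",
    "# The parameter names and values here are hypothetical.\n",
    "demo_trans_dict = {0: 'a', 1: 'b', 2: 'vr'}  # index -> parameter name\n",
    "demo_gene = [0.02, 0.2, -65.0]               # one individual's gene values\n",
    "demo_attrs = {demo_trans_dict[i]: str(v) for i, v in enumerate(demo_gene)}\n",
    "print(demo_attrs)                            # {'a': '0.02', 'b': '0.2', 'vr': '-65.0'}\n"
   ]
  },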
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "def get_trans_dict(param_dict):\n",
    "    trans_dict = {}\n",
    "    for i,k in enumerate(list(param_dict.keys())):\n",
    "        trans_dict[i]=k\n",
    "    return trans_dict\n",
    "import model_parameters\n",
    "param_dict = model_parameters.model_params\n",
    "\n",
    "def update_vm_pop(pop, trans_dict):\n",
    "    '''\n",
    "    Inputs: a population of genes/alleles and a translation dictionary mapping\n",
    "    gene index to parameter name.\n",
    "    Outputs: a population of VirtualModel objects, i.e. pickle-able containers\n",
    "    onto which the gene attributes have been mapped.\n",
    "    '''\n",
    "    from itertools import repeat\n",
    "    import numpy as np\n",
    "    import copy\n",
    "    pop = [ toolbox.clone(i) for i in pop ]\n",
    "    from neuronunit.tests import VirtualModel\n",
    "    def transform(ind):\n",
    "        vm = VirtualModel()\n",
    "        param_dict={}\n",
    "        for i,j in enumerate(ind):\n",
    "            param_dict[trans_dict[i]] = str(j)\n",
    "        vm.attrs = param_dict\n",
    "        vm.name = vm.attrs\n",
    "        return vm\n",
    "    if len(pop) > 1:\n",
    "        vmpop = dview.map_sync(transform, pop)\n",
    "        vmpop = list(copy.copy(vmpop))\n",
    "    else:\n",
    "        vmpop = [ transform(pop[0]) ]\n",
    "    return vmpop\n",
    "\n",
    "#@depend(update_vm_pop,True)\n",
    "def check_rheobase(vmpop,pop=None):\n",
    "    '''\n",
    "    Inputs: a population of VirtualModel objects and, optionally, the matching\n",
    "    population of genes/alleles.\n",
    "    Outputs: the same populations, with a rheobase value attached to each\n",
    "    VirtualModel. Rationale: not every gene value results in a model for which a\n",
    "    rheobase can be found; such genes are discarded, so to compensate for losses\n",
    "    in gene population size, more gene samples must be tested for a successful\n",
    "    return from the rheobase search.\n",
    "    '''\n",
    "    def check_fix_range(vms):\n",
    "        '''\n",
    "        Inputs: a VirtualModel whose lookup dictionary records previously tested\n",
    "        current injection values and the resulting spike counts.\n",
    "        Outputs: the VirtualModel, updated. If rheobase was found, vms.boolean is\n",
    "        set to True and vms.rheobase holds the current; otherwise vms.steps holds\n",
    "        the range of values to use as input for a subsequent search.\n",
    "        '''\n",
    "        import pdb\n",
    "        import copy\n",
    "        import numpy as np\n",
    "        import quantities as pq\n",
    "        sub=[]\n",
    "        supra=[]\n",
    "        steps=[]\n",
    "        vms.rheobase=0.0\n",
    "        for k,v in vms.lookup.items():\n",
    "            if v==1:\n",
    "                # Exactly one spike: rheobase was found.\n",
    "                vms.rheobase=float(k)\n",
    "                vms.steps = 0.0\n",
    "                vms.boolean = True\n",
    "                return vms\n",
    "            elif v==0:\n",
    "                sub.append(k)\n",
    "            elif v>0:\n",
    "                supra.append(k)\n",
    "\n",
    "        sub=np.array(sub)\n",
    "        supra=np.array(supra)\n",
    "\n",
    "        if len(sub)!=0 and len(supra)!=0:\n",
    "            # This assertion could only fail if there was a bug.\n",
    "            assert not sub.max()>supra.min()\n",
    "        if len(sub) and len(supra):\n",
    "            everything = np.concatenate((sub,supra))\n",
    "            center = np.linspace(sub.max(),supra.min(),7)\n",
    "            centerl = list(center)\n",
    "            # Drop candidate amplitudes that were already tested.\n",
    "            for i,j in enumerate(centerl):\n",
    "                if i in list(everything):\n",
    "                    center = np.delete(center,i)\n",
    "                    del centerl[i]\n",
    "            # Make sure the middle element of the vector is exactly half way\n",
    "            # between sub.max() and supra.min().\n",
    "            center[int(len(center)/2)]=(sub.max()+supra.min())/2.0\n",
    "            steps = [ i*pq.pA for i in center ]\n",
    "\n",
    "        elif len(sub):\n",
    "            steps = np.linspace(sub.max(),2*sub.max(),7)\n",
    "            steps = [ i*pq.pA for i in steps ]\n",
    "\n",
    "        elif len(supra):\n",
    "            steps = np.linspace(-2*(supra.min()),supra.min(),7)\n",
    "            steps = [ i*pq.pA for i in steps ]\n",
    "\n",
    "        vms.steps = steps\n",
    "        vms.rheobase = None\n",
    "        return copy.copy(vms)\n",
    "\n",
    "\n",
    "    def check_current(ampl,vm):\n",
    "        '''\n",
    "        Inputs: an amplitude to test and a virtual model.\n",
    "        Output: the virtual model with an updated lookup dictionary.\n",
    "        '''\n",
    "        global model\n",
    "        import quantities as pq\n",
    "        import get_neab\n",
    "\n",
    "        from neuronunit.models import backends\n",
    "        from neuronunit.models.reduced import ReducedModel\n",
    "\n",
    "        # NB: this suffix must match the one used when the per-worker model\n",
    "        # file was created in p_imports.\n",
    "        new_file_path = str(get_neab.LEMS_MODEL_PATH)+str(int(os.getpid()))\n",
    "\n",
    "        model = ReducedModel(new_file_path,name=str(vm.attrs),backend='NEURON')\n",
    "        model.load_model()\n",
    "        model.update_run_params(vm.attrs)\n",
    "\n",
    "        DELAY = 100.0*pq.ms\n",
    "        DURATION = 1000.0*pq.ms\n",
    "        params = {'injected_square_current':\n",
    "                  {'amplitude':100.0*pq.pA, 'delay':DELAY, 'duration':DURATION}}\n",
    "\n",
    "        print('model name {0}'.format(model.name))\n",
    "        if float(ampl) not in vm.lookup or len(vm.lookup)==0:\n",
    "            current = params.copy()['injected_square_current']\n",
    "            uc = {'amplitude':ampl}\n",
    "            current.update(uc)\n",
    "            current = {'injected_square_current':current}\n",
    "            vm.run_number += 1\n",
    "            model.update_run_params(vm.attrs)\n",
    "            model.inject_square_current(current)\n",
    "            vm.previous = ampl\n",
    "            n_spikes = model.get_spike_count()\n",
    "            vm.lookup[float(ampl)] = n_spikes\n",
    "            if n_spikes == 1:\n",
    "                vm.rheobase = float(ampl)\n",
    "                print('current {0} spikes {1}'.format(vm.rheobase,n_spikes))\n",
    "                vm.name = str('rheobase {0} parameters {1}'.format(str(current),str(model.params)))\n",
    "                return vm\n",
    "            return vm\n",
    "        if float(ampl) in vm.lookup:\n",
    "            return vm\n",
    "\n",
    "    from itertools import repeat\n",
    "    import numpy as np\n",
    "    import copy\n",
    "    import pdb\n",
    "    import get_neab\n",
    "\n",
    "    def init_vm(vm):\n",
    "        import quantities as pq\n",
    "        import numpy as np\n",
    "        vm.boolean = False\n",
    "        steps = list(np.linspace(-50,200,7))\n",
    "        steps_current = [ i*pq.pA for i in steps ]\n",
    "        vm.steps = steps_current\n",
    "        return vm\n",
    "\n",
    "    def find_rheobase(vm):\n",
    "        from neuronunit.models import backends\n",
    "        from neuronunit.models.reduced import ReducedModel\n",
    "        import get_neab\n",
    "        print(pid_map[int(os.getpid())])\n",
    "\n",
    "        new_file_path = str(get_neab.LEMS_MODEL_PATH)+str(os.getpid())\n",
    "        model = ReducedModel(new_file_path,name=str(vm.attrs),backend='NEURON')\n",
    "        model.load_model()\n",
    "        model.update_run_params(vm.attrs)\n",
    "        cnt = 0\n",
    "        while not vm.boolean: # optionally guard with: and cnt < 21\n",
    "            for step in vm.steps:\n",
    "                vm = check_current(step, vm)\n",
    "            vm = check_fix_range(vm)\n",
    "            cnt+=1\n",
    "        return vm\n",
    "\n",
    "    vmpop = dview.map_sync(init_vm,vmpop)\n",
    "    vmpop = list(vmpop)\n",
    "\n",
    "    vmpop = lview.map_sync(find_rheobase,vmpop)\n",
    "    vmpop = list(vmpop)\n",
    "    if pop is not None:\n",
    "        # final_check is assumed to be defined elsewhere; it presumably\n",
    "        # reconciles the gene population with the surviving virtual models.\n",
    "        vmpop, pop = final_check(vmpop,pop)\n",
    "    return vmpop, pop\n",
    "\n",
    "\n",
    "print('yes')\n"
   ]
  },
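  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch of the bookkeeping used by check_fix_range above: each tested\n",
    "# amplitude maps to a spike count; amplitudes with 0 spikes are subthreshold,\n",
    "# more than 1 are suprathreshold, and exactly 1 spike is taken as rheobase. The\n",
    "# next search interval is (max subthreshold, min suprathreshold). Values below\n",
    "# are hypothetical, in pA.\n",
    "import numpy as np\n",
    "demo_lookup = {-50.0: 0, 0.0: 0, 50.0: 0, 100.0: 0, 150.0: 4, 200.0: 9}\n",
    "sub = np.array([k for k, v in demo_lookup.items() if v == 0])\n",
    "supra = np.array([k for k, v in demo_lookup.items() if v > 1])\n",
    "found = [k for k, v in demo_lookup.items() if v == 1]\n",
    "if not found:\n",
    "    next_steps = np.linspace(sub.max(), supra.min(), 7)\n",
    "    print(next_steps)  # seven candidate amplitudes between 100 and 150 pA\n"
   ]
  },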
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "\n",
    "def trivial(vms): # This method must be pickle-able for the parallel map to work.\n",
    "    '''\n",
    "    Input: a virtual model object (vms) containing an appropriate parameter set\n",
    "    together with the rheobase value found for it in a previous rheobase search.\n",
    "    Output: a tuple of error components that NSGA can act on.\n",
    "    Assumes rheobase has already been found for each virtual model object; the\n",
    "    assertion below guards against a missing value.\n",
    "    '''\n",
    "    from neuronunit.models import backends\n",
    "    from neuronunit.models.reduced import ReducedModel\n",
    "    import quantities as pq\n",
    "    import numpy as np\n",
    "    import sciunit\n",
    "    import get_neab\n",
    "\n",
    "    new_file_path = str(get_neab.LEMS_MODEL_PATH)+str(os.getpid())\n",
    "    model = ReducedModel(new_file_path,name=str(vms.attrs),backend='NEURON')\n",
    "    model.load_model()\n",
    "    assert vms.rheobase is not None\n",
    "    DELAY = 100.0*pq.ms\n",
    "    DURATION = 1000.0*pq.ms\n",
    "    AMPLITUDE = 100.0*pq.pA\n",
    "    params = {'injected_square_current':\n",
    "              {'amplitude':AMPLITUDE, 'delay':DELAY, 'duration':DURATION}}\n",
    "\n",
    "    model.update_run_params(vms.attrs)\n",
    "\n",
    "    for k,v in enumerate(get_neab.suite.tests):\n",
    "        if k == 0:\n",
    "            v.prediction = {}\n",
    "            v.prediction['value'] = vms.rheobase * pq.pA\n",
    "        if k == 1 or k == 2 or k == 3:\n",
    "            v.params['injected_square_current']['duration'] = 100 * pq.ms\n",
    "            v.params['injected_square_current']['amplitude'] = -10 * pq.pA\n",
    "            v.params['injected_square_current']['delay'] = 30 * pq.ms\n",
    "\n",
    "    model.update_run_params(vms.attrs)\n",
    "    score = get_neab.suite.judge(model, stop_on_error = False)\n",
    "    vms.score = score\n",
    "    model.run_number+=1\n",
    "    error = []\n",
    "    other_mean = np.mean([i for i in score.norm_score.values.tolist()[0] if i is not None])\n",
    "    for my_score in score.norm_score.values.tolist()[0]:\n",
    "        if isinstance(my_score,sciunit.ErrorScore):\n",
    "            error.append(-100.0)\n",
    "        elif my_score is None:\n",
    "            # A missing score is imputed with the mean of the others.\n",
    "            error.append(other_mean)\n",
    "        else:\n",
    "            # The further the normalized score is from zero, the smaller the\n",
    "            # error magnitude; achieve this with -1/|score|. A zero score gets\n",
    "            # the same large penalty as an ErrorScore.\n",
    "            if my_score == 0:\n",
    "                error.append(-100.0)\n",
    "            else:\n",
    "                error.append(-1.0/np.abs(my_score))\n",
    "    return error[0],error[1],error[2],error[3],error[4],error[5],error[6],error[7]\n",
    "\n",
    "\n",
    "BOUND_LOW = [ np.min(i) for i in modelp.model_params.values() ]\n",
    "BOUND_UP = [ np.max(i) for i in modelp.model_params.values() ]\n",
    "NDIM = len(BOUND_UP)\n",
    "\n",
    "def uniform(low, up, size=None):\n",
    "    try:\n",
    "        return [random.uniform(a, b) for a, b in zip(low, up)]\n",
    "    except TypeError:\n",
    "        return [random.uniform(a, b) for a, b in zip([low] * size, [up] * size)]\n",
    "\n",
    "toolbox = base.Toolbox()\n",
    "\n",
    "toolbox.register(\"attr_float\", uniform, BOUND_LOW, BOUND_UP, NDIM)\n",
    "toolbox.register(\"Individual\", tools.initIterate, creator.Individual, toolbox.attr_float)\n",
    "toolbox.register(\"population\", tools.initRepeat, list, toolbox.Individual)\n",
    "toolbox.register(\"mate\", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0)\n",
    "toolbox.register(\"mutate\", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM)\n",
    "toolbox.decorate(\"mate\", history.decorator)\n",
    "toolbox.decorate(\"mutate\", history.decorator)\n",
    "toolbox.register(\"select\", tools.selNSGA2)\n",
    "print('yes?')\n"
   ]
  },
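  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch of the score -> error mapping used in trivial() above:\n",
    "# ErrorScores and zero scores map to a large penalty, missing scores are imputed\n",
    "# with the mean of the others, and valid scores become -1/|score|. The inputs\n",
    "# here are hypothetical normalized scores.\n",
    "import numpy as np\n",
    "demo_scores = [0.9, None, 0.4, 0.0]\n",
    "other_mean = np.mean([s for s in demo_scores if s is not None])\n",
    "demo_error = []\n",
    "for s in demo_scores:\n",
    "    if s is None:\n",
    "        demo_error.append(other_mean)\n",
    "    elif s == 0:\n",
    "        demo_error.append(-100.0)\n",
    "    else:\n",
    "        demo_error.append(-1.0 / np.abs(s))\n",
    "print(demo_error)\n"
   ]
  },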
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "##\n",
    "# Start of the Genetic Algorithm\n",
    "##\n",
    "\n",
    "NGEN = 3\n",
    "import numpy as np\n",
    "MU = 4\n",
    "CXPB = 0.9\n",
    "pf = tools.ParetoFront()\n",
    "dview.push({'pf':pf})\n",
    "trans_dict = get_trans_dict(param_dict)\n",
    "td = trans_dict\n",
    "dview.push({'trans_dict':trans_dict,'td':td})\n",
    "'''\n",
    "Optional checkpoint recovery:\n",
    "new_checkpoint_path = str('rh_checkpoint')+str('.p')\n",
    "try:\n",
    "    assert open(new_checkpoint_path,'rb')\n",
    "    cp = pickle.load(open(new_checkpoint_path,'rb'))\n",
    "    vmpop = cp['vmpop']\n",
    "    pop = cp['pop']\n",
    "    print(pop,vmpop)\n",
    "'''\n",
    "\n",
    "pop = toolbox.population(n = MU)\n",
    "pop = [ toolbox.clone(i) for i in pop ]\n",
    "pf.update([toolbox.clone(i) for i in pop])\n",
    "vmpop = update_vm_pop(pop, td)\n",
    "print(vmpop)\n",
    "vmpop, _ = check_rheobase(vmpop)\n",
    "for i in vmpop:\n",
    "    print('the rheobase value is {0}'.format(i.rheobase))\n",
    "\n",
    "new_checkpoint_path = str('rh_checkpoint')+str('.p')\n",
    "import pickle\n",
    "with open(new_checkpoint_path,'wb') as handle:\n",
    "    pickle.dump({'vmpop':vmpop,'pop':pop}, handle)\n",
    "cp = pickle.load(open(new_checkpoint_path,'rb'))\n",
    "for i in vmpop:\n",
    "    print(i, i.rheobase, 'got here')\n",
    "assert vmpop is not None\n",
    "import copy\n",
    "\n",
    "fitnesses = list(dview.map_sync(trivial, copy.copy(vmpop) ))\n",
    "\n",
    "for i in fitnesses:\n",
    "    print('the fitness value is {0}'.format(i))\n",
    "checkpoint = {}\n",
    "checkpoint[0] = [ fitnesses, vmpop, pop ]\n",
    "import pickle\n",
    "with open('new_checkpoint_path.p','wb') as handle:\n",
    "    pickle.dump(checkpoint,handle)\n",
    "invalid_ind = [ ind for ind in pop if not ind.fitness.valid ]\n",
    "for gen in range(1, NGEN):\n",
    "\n",
    "    # NB: an earlier draft also called tools.selTournamentDCD here, but its\n",
    "    # result was immediately discarded in favor of selNSGA2, so it is omitted.\n",
    "    offspring = tools.selNSGA2(pop, len(pop))\n",
    "    assert len(offspring)!=0\n",
    "    offspring = [toolbox.clone(ind) for ind in offspring]\n",
    "    assert len(offspring)!=0\n",
    "    for ind1, ind2 in zip(offspring[::2], offspring[1::2]):\n",
    "        if random.random() <= CXPB:\n",
    "            toolbox.mate(ind1, ind2)\n",
    "        toolbox.mutate(ind1)\n",
    "        toolbox.mutate(ind2)\n",
    "        del ind1.fitness.values, ind2.fitness.values\n",
    "\n",
    "    vmpop = update_vm_pop(offspring, td)\n",
    "    vmpop, _ = check_rheobase(vmpop)\n",
    "    fitnesses = list(dview.map_sync(trivial, vmpop))\n",
    "    for i in fitnesses:\n",
    "        print('the fitness value is {0}'.format(i))\n",
    "\n",
    "    for ind, fit in zip(offspring, fitnesses):\n",
    "        ind.fitness.values = fit\n",
    "\n",
    "    checkpoint[gen] = [fitnesses,vmpop,offspring]\n",
    "    import pickle\n",
    "    new_checkpoint_path = str('checkpoint')+str(pid_map[int(os.getpid())])+str('.p')\n",
    "    with open(new_checkpoint_path,'wb') as handle:\n",
    "        pickle.dump(checkpoint,handle)\n",
    "\n",
    "    size_delta = MU-len(offspring)\n",
    "    assert size_delta == 0\n",
    "    pop = toolbox.select(offspring, MU)\n",
    "    print('the pareto front is: {0}'.format(pf))\n",
    "\n",
    "vmpop = update_vm_pop(pop, td)\n"
   ]
  },
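  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A self-contained toy run of the same NSGA-II machinery, using a trivial\n",
    "# two-objective function instead of neuron simulations; useful for checking the\n",
    "# DEAP plumbing independently of the cluster. All Toy* names are local to this\n",
    "# demo and hypothetical.\n",
    "import random\n",
    "from deap import base, creator, tools\n",
    "if not hasattr(creator, \"ToyFit\"):\n",
    "    creator.create(\"ToyFit\", base.Fitness, weights=(-1.0, -1.0))\n",
    "    creator.create(\"ToyInd\", list, fitness=creator.ToyFit)\n",
    "toy_tb = base.Toolbox()\n",
    "toy_tb.register(\"attr\", random.uniform, 0.0, 1.0)\n",
    "toy_tb.register(\"individual\", tools.initRepeat, creator.ToyInd, toy_tb.attr, 2)\n",
    "toy_tb.register(\"population\", tools.initRepeat, list, toy_tb.individual)\n",
    "toy_tb.register(\"evaluate\", lambda ind: (ind[0]**2, (ind[1]-1)**2))\n",
    "toy_tb.register(\"select\", tools.selNSGA2)\n",
    "toy_pop = toy_tb.population(n=8)\n",
    "for ind in toy_pop:\n",
    "    ind.fitness.values = toy_tb.evaluate(ind)\n",
    "toy_pop = toy_tb.select(toy_pop, 4)  # keep the 4 best by nondominated sorting\n",
    "print([ind.fitness.values for ind in toy_pop])\n"
   ]
  },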
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "\n",
    "import pandas as pd\n",
    "scores = []\n",
    "for j,i in enumerate(pf):\n",
    "    i.name = vmpop[j].attrs\n",
    "    scores.append(vmpop[j].score)\n",
    "    print(vmpop[j].score)\n",
    "\n",
    "sc = pd.DataFrame(scores[0])\n",
    "sc\n",
    "data = [ pf[0].name ]\n",
    "model_values0 = pd.DataFrame(data)\n",
    "model_values0\n",
    "# rhstorage is assumed to have been populated by an earlier rheobase run.\n",
    "rhstorage[0]\n",
    "\n",
    "sc1 = pd.DataFrame(scores[1])\n",
    "sc1\n",
    "\n",
    "rhstorage[1]\n",
    "\n",
    "data = [ pf[1].name ]\n",
    "model_values1 = pd.DataFrame(data)\n",
    "model_values1\n",
    "\n",
    "pf[1].name\n",
    "\n",
    "import pickle\n",
    "import pandas as pd\n",
    "\n",
    "try:\n",
    "    ground_error = pickle.load(open('big_model_evaulated.pickle','rb'))\n",
    "except:\n",
    "    # This exception branch is only skeletal; it sketches the right principle\n",
    "    # but would need a real ground_truth population to actually work.\n",
    "    print('{0} it seems the error truth data does not yet exist, lets create it now '.format(str(False)))\n",
    "    from neuronunit.utilities import Utilities as ut\n",
    "\n",
    "    ground_error = list(dview.map_sync(ut.func2map, ground_truth))\n",
    "    pickle.dump(ground_error,open('big_model_evaulated.pickle','wb'))\n",
    "\n",
    "# ground_error_nsga=list(zip(vmpop,pop,invalid_ind))\n",
    "# pickle.dump(ground_error_nsga,open('nsga_evaulated.pickle','wb'))\n",
    "\n",
    "sum_errors = [ i[0] for i in ground_error ]\n",
    "composite_errors = [ i[1] for i in ground_error ]\n",
    "attrs = [ i[2] for i in ground_error ]\n",
    "rheobase = [ i[3] for i in ground_error ]\n",
    "\n",
    "indexs = [ i for i,j in enumerate(sum_errors) if j==np.min(sum_errors) ][0]\n",
    "indexc = [ i for i,j in enumerate(composite_errors) if j==np.min(composite_errors) ][0]\n",
    "\n",
    "df_0 = pd.DataFrame([ (k,v,vmpop[0].attrs[k],float(v)-float(vmpop[0].attrs[k])) for k,v in ground_error[indexc][2].items() ])\n",
    "df_1 = pd.DataFrame([ (k,v,vmpop[1].attrs[k],float(v)-float(vmpop[1].attrs[k])) for k,v in ground_error[indexc][2].items() ])\n",
    "\n",
    "df_0\n",
    "\n",
    "df_1\n"
   ]
  },
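  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal note on the index-selection pattern above: the list comprehensions\n",
    "# that pick indexs/indexc are equivalent to numpy's argmin. Values here are\n",
    "# hypothetical.\n",
    "import numpy as np\n",
    "demo_errors = [3.2, 1.1, 2.7]\n",
    "assert int(np.argmin(demo_errors)) == [ i for i,j in enumerate(demo_errors) if j==np.min(demo_errors) ][0]\n",
    "print(int(np.argmin(demo_errors)))  # -> 1\n"
   ]
  },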
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}