Commit: Rank-1 NES algorithm
Signed-off-by: unknown <[email protected]>
unknown authored and unknown committed Mar 5, 2012
1 parent 87c7ac3 commit 8fe989d
Showing 19 changed files with 1,268 additions and 23 deletions.
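The commit title refers to rank-1 NES (Natural Evolution Strategies), a variant that keeps the Gaussian search covariance in the rank-one form sigma^2 (I + u u^T) so that sampling and updating stay cheap in high dimensions. As rough orientation only — this is not the committed implementation, and every name in it is invented — a minimal sketch of the sampling and mean-update steps:

    import numpy as np

    def r1nes_sample(m, sigma, u, rng=np.random):
        # draw from N(m, sigma**2 * (I + u u^T)) without forming the matrix:
        # z covers the isotropic part, s the rank-one direction u
        z = rng.randn(len(m))
        s = rng.randn()
        return m + sigma * (z + u * s)

    def r1nes_mean_step(m, samples, fitnesses, lr=0.5):
        # utility-weighted recombination towards the better samples; a full
        # implementation also adapts sigma and u by natural-gradient steps
        order = np.argsort(fitnesses)[::-1]
        w = np.linspace(1.0, 0.0, len(order))
        w /= w.sum()
        step = sum(wi * (samples[i] - m) for wi, i in zip(w, order))
        return m + lr * step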
68 changes: 68 additions & 0 deletions examples/optimization/multiobjective/constnsga2jpq.py
@@ -0,0 +1,68 @@
#!/usr/bin/env python
""" An illustration of using the NSGA-II multi-objective optimization algorithm
on Constrained Multi-Objective Optimization benchmark function. """

__author__ = 'Jean Pierre Queau, [email protected]'

from pybrain.optimization import ConstMultiObjectiveGA
from pybrain.rl.environments.functions.multiobjective import ConstDeb, ConstSrn, \
    ConstOsy, ConstTnk, ConstBnh
import pylab
from scipy import zeros, array

# The Deb function
#f = ConstDeb()
# The Srinivas & Deb function
#f = ConstSrn()
# The Osyczka & Kundu function
#f = ConstOsy()
# The Tanaka function
#f = ConstTnk()
# The Binh & Korn function
f = ConstBnh()
# start at the origin...
x0 = zeros(f.indim)
# ...then move the starting point to the lower bound of each parameter
x0 = array([min_ for min_, max_ in f.xbound])

# run the optimization for a maximum of 50 generations
n = ConstMultiObjectiveGA(f, x0, storeAllEvaluations = True, populationSize = 100, eliteProportion = 1.0,
        topProportion = 1.0, mutationProb = 1.0, mutationStdDev = 0.3, storeAllPopulations = True, allowEquality = False)
print 'Start Learning'
n.learn(50)
print 'End Learning'
# plotting the results (blue = feasible points, red = infeasible points, green = resulting pareto front)
print 'Plotting the Results'
print 'All Evaluations.... take some time'
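# each stored evaluation is assumed to be a (objective values, feasibility flag) pair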
for x in n._allEvaluations:
    if x[1]:
        pylab.plot([x[0][0]], [x[0][1]], 'b.')
    else:
        pylab.plot([x[0][0]], [x[0][1]], 'r.')
for x in n.bestEvaluation: pylab.plot([x[0][0]], [x[0][1]], 'go')
pylab.show()
print 'Pareto Front'
for x in n.bestEvaluation: pylab.plot([x[0][0]], [x[0][1]], 'go')
pylab.show()

print '==========='
print '= Results ='
print '==========='
'''
i = 0
for gen in n._allGenerations:
    print 'Generation: ', i
    for j in range(len(gen[1])):
        print gen[1].keys()[j], gen[1].values()[j]
    i += 1
'''
print 'Population size ',n.populationSize
print 'Elitism Proportion ',n.eliteProportion
print 'Mutation Probability ',n.mutationProb
print 'Mutation Std Deviation ',n.mutationStdDev
print 'Objective Evaluation number ',n.numEvaluations
print 'Last generation: length of bestEvaluation ', len(n.bestEvaluation)
print 'Best Evaluable : Best Evaluation'
assert len(n.bestEvaluation) == len(n.bestEvaluable)
for i in range(len(n.bestEvaluation)):
    print n.bestEvaluable[i], ':', n.bestEvaluation[i]
58 changes: 58 additions & 0 deletions examples/optimization/multiobjective/nsga2jpq.py
@@ -0,0 +1,58 @@
#!/usr/bin/env python
""" An illustration of using the NSGA-II multi-objective optimization algorithm
on Unconstrained Multi-Objective Optimization benchmark function. """

__author__ = 'Jean Pierre Queau, [email protected]'

from pybrain.optimization import MultiObjectiveGA
from pybrain.rl.environments.functions.multiobjective import Deb, Pol
import pylab
from scipy import zeros, array

# The Deb function
#f = Deb()
# The Pol function
f = Pol()

# start at the origin...
x0 = zeros(f.indim)
# ...then move the starting point to the lower bound of each parameter
x0 = array([min_ for min_, max_ in f.xbound])

# run the optimization for a maximum of 30 generations
n = MultiObjectiveGA(f, x0, storeAllEvaluations = True, populationSize = 50, eliteProportion = 1.0,
        topProportion = 1.0, mutationProb = 0.5, mutationStdDev = 0.1, storeAllPopulations = True, allowEquality = False)
print 'Start Learning'
n.learn(30)
print 'End Learning'

# plotting the results (blue = all evaluated points, red = resulting pareto front)
print 'Plotting the Results'
print 'All Evaluations'
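# each stored evaluation is assumed to be an array of the two objective values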
for x in n._allEvaluations: pylab.plot([x[0]], [x[1]], 'b.')
for x in n.bestEvaluation: pylab.plot([x[0]], [x[1]], 'ro')
pylab.show()
print 'Pareto Front'
for x in n.bestEvaluation: pylab.plot([x[0]], [x[1]], 'ro')
pylab.show()
print '==========='
print '= Results ='
print '==========='
'''
i = 0
for gen in n._allGenerations:
    print 'Generation: ', i
    for j in range(len(gen[1])):
        print gen[1].keys()[j], gen[1].values()[j]
    i += 1
'''
print 'Population size ',n.populationSize
print 'Elitism Proportion ',n.eliteProportion
print 'Mutation Probability ',n.mutationProb
print 'Mutation Std Deviation ',n.mutationStdDev
print 'Objective Evaluation number ',n.numEvaluations
print 'Last generation: length of bestEvaluation ', len(n.bestEvaluation)
print 'Best Evaluable : Best Evaluation'
assert len(n.bestEvaluation) == len(n.bestEvaluable)
for i in range(len(n.bestEvaluation)):
    print n.bestEvaluable[i], ':', n.bestEvaluation[i]
47 changes: 47 additions & 0 deletions examples/optimization/multiobjective/readme.txt
@@ -0,0 +1,47 @@

nsga2.py is the original example for the Kur benchmark function.

nsga2jpq.py is the modified example demonstrating the implementation
for Unconstrained Multi-Objective Optimization Problems; it has been
validated against the DEB function by Deb (2001) and the POL function
by Poloni et al. (2000). Modifications have been made to the original
code to implement the boundaries of the parameters (see the sketch
below).
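To give an idea of what handling the boundaries involves, here is a
minimal sketch (illustrative only, not the committed code), assuming
xbound is the list of (min, max) pairs that the examples above use:

    from scipy import array

    def clip_to_bounds(x, xbound):
        # clamp every component of x into its [min, max] interval
        return array([min(max(xi, lo), hi) for xi, (lo, hi) in zip(x, xbound)])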

constnsga2jpq.py is new; it is an example demonstrating the
implementation for Constrained Multi-Objective Optimization Problems
and has been validated against the DEB CONSTR function by Deb (2001),
the SRN function by Srinivas & Deb (1994) and the OSY function by
Osyczka & Kundu (1995). A new class has been defined and modifications
have been made to the original code to implement the constrained
functions (a sketch of the usual constraint-handling rule follows).
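Constrained NSGA-II variants typically rank solutions with Deb's
constrained-domination rule; the sketch below shows the idea (the
attribute names here are made up, not those of the committed code):

    def constrained_dominates(a, b):
        # a feasible solution always beats an infeasible one
        if a.feasible and not b.feasible:
            return True
        if not a.feasible and b.feasible:
            return False
        # between two infeasible solutions, less total violation wins
        if not a.feasible and not b.feasible:
            return a.violation < b.violation
        # between two feasible solutions, plain Pareto domination (minimization)
        not_worse = all(x <= y for x, y in zip(a.objectives, b.objectives))
        better = any(x < y for x, y in zip(a.objectives, b.objectives))
        return not_worse and better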


The modifications or new functions added are encapsulated in the code by

""" added by JPQ """

....
....

# ---

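For instance, a marked region in one of the modified files would look
like this (the body shown here is made up purely for illustration):

    """ added by JPQ """
    # evaluate the constraints alongside the objectives
    feasible = all(g >= 0 for g in constraints)
    # ---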

The following code files have been modified:

pybrain/rl/environments/functions/multiobjective.py
pybrain/rl/environments/functions/transformations.py
pybrain/tools/nondominated.py
pybrain/optimization/optimizer.py
pybrain/optimization/populationbased/ga.py
pybrain/optimization/populationbased/multiobjective/__init__.py
pybrain/optimization/populationbased/multiobjective/nsga2.py
pybrain/optimization/populationbased/multiobjective/constnsga2.py

In the end it is clear that nsga2.py and constnsga2.py should be merged.
In the transformations file only the oppositeFunction has been modified;
maybe the other functions should also be modified, but this was not
required to make the code run (a sketch of the idea follows).
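The essence of that transformation is just flipping the sign of the
objective values so that maximization and minimization are swapped; a
minimal sketch (the interface here is a guess, not the committed code):

    def oppositeFunction(basef):
        # wrap basef so that every objective value is negated
        def negf(x):
            return -basef.f(x)
        return negf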

Hope this helps you understand what I have been doing.

70 changes: 70 additions & 0 deletions examples/supervised/Test Network Reader&Writer/jpq2layersReader.py
@@ -0,0 +1,70 @@
from pybrain.tools.validation import ModuleValidator, Validator
from pybrain.tools.customxml import NetworkReader
from pybrain.datasets import SupervisedDataSet
import numpy
import pylab
import os

def myplot(trns, ctrns=None, tsts=None, ctsts=None, iter=0):
    plotdir = os.path.join(os.getcwd(), 'plot')
    pylab.clf()
    # plot a dataset only when it was passed in and is non-trivial
    tstsplot = tsts is not None and len(tsts) > 1
    ctstsplot = ctsts is not None and len(ctsts) > 1
    ctrnsplot = ctrns is not None and len(ctrns) > 1
    if tstsplot:
        pylab.plot(tsts['input'], tsts['target'], c='b')
    pylab.scatter(trns['input'], trns['target'], c='r')
    if ctrnsplot:
        pylab.scatter(trns['input'], ctrns, c='y')
    if tstsplot and ctstsplot:
        pylab.plot(tsts['input'], ctsts, c='g')

    pylab.xlabel('x')
    pylab.ylabel('y')
    pylab.title('Neuron Number:' + str(nneuron))
    pylab.grid(True)
    plotname = os.path.join(plotdir, 'jpq2layers_plot' + str(iter))
    pylab.savefig(plotname)


# set-up the neural network
nneuron = 5
mom = 0.98
netname="LSL-"+str(nneuron)+"-"+str(mom)
mv=ModuleValidator()
v = Validator()


#create the test DataSet
x = numpy.arange(0.0, 1.0+0.01, 0.01)
s = 0.5+0.4*numpy.sin(2*numpy.pi*x)
tsts = SupervisedDataSet(1,1)
tsts.setField('input',x.reshape(len(x),1))
tsts.setField('target',s.reshape(len(s),1))
#read the train DataSet from file
trndata = SupervisedDataSet.loadFromFile(os.path.join(os.getcwd(),'trndata'))

myneuralnet = os.path.join(os.getcwd(),'myneuralnet.xml')
if os.path.isfile(myneuralnet):
    n = NetworkReader.readFrom(myneuralnet, name=netname)
    # calculate the test DataSet output based on the trained Neural Network
    ctsts = mv.calculateModuleOutput(n, tsts)
    tserr = v.MSE(ctsts, tsts['target'])
    print 'MSE error on TSTS:', tserr
    myplot(trndata, tsts=tsts, ctsts=ctsts)

pylab.show()
130 changes: 130 additions & 0 deletions examples/supervised/Test Network Reader&Writer/jpq2layersWriter.py
@@ -0,0 +1,130 @@
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.structure import BiasUnit
from pybrain.structure import FullConnection
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.validation import ModuleValidator, Validator
from pybrain.tools.customxml import NetworkWriter
import numpy
import pylab
import os

def myplot(trns, ctrns, tsts=None, ctsts=None, iter=0):
    plotdir = os.path.join(os.getcwd(), 'plot')
    pylab.clf()
    # plot the test curves only when they were passed in and are non-trivial
    tstsplot = tsts is not None and len(tsts) > 1
    ctstsplot = ctsts is not None and len(ctsts) > 1
    if tstsplot:
        pylab.plot(tsts['input'], tsts['target'], c='b')
    pylab.scatter(trns['input'], trns['target'], c='r')
    pylab.scatter(trns['input'], ctrns, c='y')
    if tstsplot and ctstsplot:
        pylab.plot(tsts['input'], ctsts, c='g')

    pylab.xlabel('x')
    pylab.ylabel('y')
    pylab.title('Neuron Number:' + str(nneuron))
    pylab.grid(True)
    plotname = os.path.join(plotdir, 'jpq2layers_plot' + str(iter))
    pylab.savefig(plotname)


# set-up the neural network
nneuron = 5
mom = 0.98
netname="LSL-"+str(nneuron)+"-"+str(mom)
mv=ModuleValidator()
v = Validator()
n=FeedForwardNetwork(name=netname)
inLayer = LinearLayer(1,name='in')
hiddenLayer = SigmoidLayer(nneuron,name='hidden0')
outLayer = LinearLayer(1,name='out')
biasinUnit = BiasUnit(name="bhidden0")
biasoutUnit = BiasUnit(name="bout")
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addModule(biasinUnit)
n.addModule(biasoutUnit)
n.addOutputModule(outLayer)
in_to_hidden = FullConnection(inLayer,hiddenLayer)
bias_to_hidden = FullConnection(biasinUnit,hiddenLayer)
bias_to_out = FullConnection(biasoutUnit,outLayer)
hidden_to_out = FullConnection(hiddenLayer,outLayer)
n.addConnection(in_to_hidden)
n.addConnection(bias_to_hidden)
n.addConnection(bias_to_out)
n.addConnection(hidden_to_out)

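# sortModules() finalizes the topology; it must be called before the net is used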
n.sortModules()
n.reset()

# read the initial weight values from myparam2.txt if it exists,
# otherwise save the randomly initialized values for reuse
filetoopen = os.path.join(os.getcwd(), 'myparam2.txt')
if os.path.isfile(filetoopen):
    myfile = open('myparam2.txt', 'r')
    c = []
    for line in myfile:
        c.append(float(line))
    myfile.close()
    n._setParameters(c)
else:
    myfile = open('myparam2.txt', 'w')
    for i in n.params:
        myfile.write(str(i) + '\n')
    myfile.close()

# activate the neural network once on a single sample
act = SupervisedDataSet(1, 1)
act.addSample((0.2,), (0.880422606518061,))
n.activateOnDataset(act)
#create the test DataSet
x = numpy.arange(0.0, 1.0+0.01, 0.01)
s = 0.5+0.4*numpy.sin(2*numpy.pi*x)
tsts = SupervisedDataSet(1,1)
tsts.setField('input',x.reshape(len(x),1))
tsts.setField('target',s.reshape(len(s),1))

#read the train DataSet from file
trndata = SupervisedDataSet.loadFromFile(os.path.join(os.getcwd(),'trndata'))

# create the trainer
t = BackpropTrainer(n, learningrate=0.01, momentum=mom)
# train the neural network on the train DataSet, stopping early once the
# error is small enough or has stopped improving
cterrori = 1.0
print "trainer momentum:" + str(mom)
for iter in range(25):
    t.trainOnDataset(trndata, 1000)
    ctrndata = mv.calculateModuleOutput(n, trndata)
    cterr = v.MSE(ctrndata, trndata['target'])
    relerr = abs(cterr - cterrori)
    cterrori = cterr
    print 'iteration:', iter + 1, 'MSE error:', cterr
    myplot(trndata, ctrndata, iter=iter + 1)
    if cterr < 1.e-5 or relerr < 1.e-7:
        break
# write the network to an xml file (append if the file already exists)
myneuralnet = os.path.join(os.getcwd(), 'myneuralnet.xml')
if os.path.isfile(myneuralnet):
    NetworkWriter.appendToFile(n, myneuralnet)
else:
    NetworkWriter.writeToFile(n, myneuralnet)

# calculate the test DataSet output based on the trained Neural Network
ctsts = mv.calculateModuleOutput(n,tsts)
tserr = v.MSE(ctsts,tsts['target'])
print 'MSE error on TSTS:',tserr
myplot(trndata,ctrndata,tsts,ctsts)

pylab.show()
16 changes: 16 additions & 0 deletions examples/supervised/Test Network Reader&Writer/myparam2.txt
@@ -0,0 +1,16 @@
-0.876387117735
0.0228437748357
0.0936056466341
-0.90331132597
-0.117868958091
0.755572921676
-0.48378563418
-0.635899582263
1.43586286085
-0.791688941299
1.16894753711
1.47155607167
0.301809828737
0.814171589059
1.02308464693
-0.746364019039