PSOIndividual.py
import numpy as np
import ObjFunction
import copy


class PSOIndividual:

    '''
    individual of PSO
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.

    def generate(self):
        '''
        generate a random chromosome
        '''
        dim = self.vardim
        rnd = np.random.random(size=dim)
        self.chrom = np.zeros(dim)
        self.velocity = np.random.random(size=dim)
        for i in range(0, dim):
            # scale the random number into [lower bound, upper bound] of dimension i
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]
        self.bestPosition = np.zeros(dim)
        self.bestFitness = 0.

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
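For a quick sanity check of the individual class on its own (assuming the ObjFunction module with GrieFunc, discussed at the end of this post, is importable), something like the following can be run:

import numpy as np
from PSOIndividual import PSOIndividual

# 2 x 5 bound array: row 0 holds the lower bounds, row 1 the upper bounds
bound = np.tile([[-600], [600]], 5)

ind = PSOIndividual(5, bound)
ind.generate()          # random position inside the bounds, random initial velocity in [0, 1)
ind.calculateFitness()  # evaluates ObjFunction.GrieFunc on the current position
print(ind.chrom, ind.fitness)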
PSO.py
import numpy as np
from PSOIndividual import PSOIndividual
import random
import copy
import matplotlib.pyplot as plt


class ParticleSwarmOptimization:

    '''
    the class for Particle Swarm Optimization
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables
        MAXGEN: termination condition
        params: algorithm parameters, a list consisting of [w, c1, c2]
        '''
        self.sizepop = sizepop
        self.vardim = vardim
        self.bound = bound
        self.MAXGEN = MAXGEN
        self.params = params
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))

    def initialize(self):
        '''
        initialize the population of pso
        '''
        for i in range(0, self.sizepop):
            ind = PSOIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluation(self):
        '''
        evaluate the fitness of the population
        '''
        for i in range(0, self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness
            if self.population[i].fitness > self.population[i].bestFitness:
                # update the particle's personal best
                self.population[i].bestFitness = self.population[i].fitness
                self.population[i].bestPosition = copy.deepcopy(
                    self.population[i].chrom)

    def update(self):
        '''
        update the population of pso
        '''
        for i in range(0, self.sizepop):
            # velocity update: inertia + cognitive (personal best) + social (global best)
            self.population[i].velocity = self.params[0] * self.population[i].velocity \
                + self.params[1] * np.random.random(self.vardim) \
                * (self.population[i].bestPosition - self.population[i].chrom) \
                + self.params[2] * np.random.random(self.vardim) \
                * (self.best.chrom - self.population[i].chrom)
            self.population[i].chrom = self.population[i].chrom \
                + self.population[i].velocity

    def solve(self):
        '''
        the evolution process of the pso algorithm
        '''
        self.t = 0
        self.initialize()
        self.evaluation()
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            self.update()
            self.evaluation()
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f" % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def printResult(self):
        '''
        plot the result of pso algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Particle Swarm Optimization algorithm for function optimization")
        plt.legend()
        plt.show()
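The update method implements the canonical PSO rules. With inertia weight w = params[0], acceleration coefficients c1 = params[1] and c2 = params[2], and r1, r2 uniform random vectors in [0, 1), each particle is moved as

    v_i = w * v_i + c1 * r1 * (pbest_i - x_i) + c2 * r2 * (gbest - x_i)
    x_i = x_i + v_i

where pbest_i is the particle's own best position so far (bestPosition) and gbest is the best particle found by the swarm (self.best.chrom). Note that chrom is never clipped back into bound after the move, so particles can wander outside the search box.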
Run the program:
import numpy as np
from PSO import ParticleSwarmOptimization

if __name__ == "__main__":

    # 2 x 25 bound array: search each of the 25 dimensions in [-600, 600]
    bound = np.tile([[-600], [600]], 25)
    pso = ParticleSwarmOptimization(60, 25, bound, 1000, [0.7298, 1.4962, 1.4962])
    pso.solve()
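For reference, np.tile([[-600], [600]], 25) produces a 2 x 25 array whose first row holds the lower bounds and second row the upper bounds, which is exactly the layout generate() indexes as bound[0, i] and bound[1, i]. A smaller example:

>>> import numpy as np
>>> np.tile([[-600], [600]], 3)
array([[-600, -600, -600],
       [ 600,  600,  600]])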
For the ObjFunction module, see the earlier post "Simple Genetic Algorithm - Python implementation".
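The ObjFunction module is not reproduced here. As a placeholder, the following is a minimal sketch of what GrieFunc might look like, assuming it is the Griewank function mapped through 1 / (1 + y) so that larger fitness is better, which matches the maximization in evaluation() and the (1 - fitness) / fitness values stored in trace (under this mapping they recover the raw Griewank value). The exact code in the referenced post may differ.

# ObjFunction.py -- hedged sketch, not the original module
import math


def GrieFunc(vardim, x, bound):
    '''
    Griewank-based fitness: smaller Griewank value -> fitness closer to 1.
    vardim: dimension of variables, x: position vector, bound: unused here.
    '''
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 += x[i - 1] ** 2
        s2 *= math.cos(x[i - 1] / math.sqrt(i))
    y = (1. / 4000.) * s1 - s2 + 1
    return 1. / (1. + y)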