This article presents a simple genetic algorithm implemented in Python. The implementation is split into three files: ObjFunction.py (the test functions), GAIndividual.py (a single individual), and GeneticAlgorithm.py (the algorithm itself), followed by a short driver script.

ObjFunction.py

import math


def GrieFunc(vardim, x, bound):
    """
    Griewangk function, returned as a fitness value in (0, 1]:
    the raw function value y is mapped through 1 / (1 + y).
    """
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 = s1 + x[i - 1] ** 2
        s2 = s2 * math.cos(x[i - 1] / math.sqrt(i))
    y = (1. / 4000.) * s1 - s2 + 1
    y = 1. / (1. + y)
    return y


def RastFunc(vardim, x, bound):
    """
    Rastrigin function (raw function value, not transformed).
    """
    s = 10 * vardim  # the 10 * n constant term of the Rastrigin function
    for i in range(1, vardim + 1):
        s = s + x[i - 1] ** 2 - 10 * math.cos(2 * math.pi * x[i - 1])
    return s
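Note that GrieFunc does not return the raw Griewangk value: y is mapped through 1 / (1 + y), so smaller function values give larger fitness, and the global minimum at the origin corresponds to a fitness of exactly 1. A quick sanity check (a snippet added here, not part of the original file):

import ObjFunction

vardim = 25
x = [0.0] * vardim                            # the global minimum of Griewangk
print(ObjFunction.GrieFunc(vardim, x, None))  # bound is unused -> prints 1.0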

GAIndividual.py

import numpy as np

import ObjFunction


class GAIndividual:

    '''
    individual of genetic algorithm
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables (2 x vardim array: lower and upper bounds)
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.

    def generate(self):
        '''
        generate a random chromosome within the bounds
        '''
        rnd = np.random.random(size=self.vardim)
        self.chrom = np.zeros(self.vardim)
        for i in range(0, self.vardim):
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
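GAIndividual only holds a chromosome and its fitness; the GeneticAlgorithm class below creates and manages a whole population of them. A hypothetical stand-alone usage (not part of the original files) looks like this:

import numpy as np

from GAIndividual import GAIndividual

# three variables, each bounded in [-600, 600]
bound = np.tile([[-600], [600]], 3)
ind = GAIndividual(3, bound)
ind.generate()            # random chromosome inside the bounds
ind.calculateFitness()    # Griewangk-based fitness in (0, 1]
print(ind.chrom, ind.fitness)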

GeneticAlgorithm.py

import copy
import random

import numpy as np
import matplotlib.pyplot as plt

from GAIndividual import GAIndividual


class GeneticAlgorithm:

    '''
    The class for genetic algorithm
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables
        MAXGEN: termination condition (number of generations)
        params: algorithm parameters, a list consisting of
                [crossover rate, mutation rate, alpha]
        '''
        self.sizepop = sizepop
        self.MAXGEN = MAXGEN
        self.vardim = vardim
        self.bound = bound
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))
        self.params = params

    def initialize(self):
        '''
        initialize the population
        '''
        for i in range(0, self.sizepop):
            ind = GAIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluate(self):
        '''
        evaluate the fitness of the whole population
        '''
        for i in range(0, self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness

    def solve(self):
        '''
        evolution process of genetic algorithm
        '''
        self.t = 0
        self.initialize()
        self.evaluate()
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        # the trace stores the raw objective value recovered from the fitness
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            self.selectionOperation()
            self.crossoverOperation()
            self.mutationOperation()
            self.evaluate()
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f; " % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def selectionOperation(self):
        '''
        roulette-wheel selection on the accumulated fitness
        '''
        newpop = []
        totalFitness = np.sum(self.fitness)
        accuFitness = np.zeros((self.sizepop, 1))

        sum1 = 0.
        for i in range(0, self.sizepop):
            accuFitness[i] = sum1 + self.fitness[i] / totalFitness
            sum1 = accuFitness[i]

        for i in range(0, self.sizepop):
            r = random.random()
            idx = 0
            for j in range(0, self.sizepop - 1):
                if j == 0 and r < accuFitness[j]:
                    idx = 0
                    break
                elif r >= accuFitness[j] and r < accuFitness[j + 1]:
                    idx = j + 1
                    break
            newpop.append(self.population[idx])
        self.population = newpop

    def crossoverOperation(self):
        '''
        arithmetic crossover from a random cut point onward
        '''
        newpop = []
        for i in range(0, self.sizepop, 2):
            idx1 = random.randint(0, self.sizepop - 1)
            idx2 = random.randint(0, self.sizepop - 1)
            while idx2 == idx1:
                idx2 = random.randint(0, self.sizepop - 1)
            newpop.append(copy.deepcopy(self.population[idx1]))
            newpop.append(copy.deepcopy(self.population[idx2]))
            r = random.random()
            if r < self.params[0]:
                crossPos = random.randint(1, self.vardim - 1)
                for j in range(crossPos, self.vardim):
                    # store the parent genes first so the second child is not
                    # built from the already-blended value of the first child
                    gene1 = newpop[i].chrom[j]
                    gene2 = newpop[i + 1].chrom[j]
                    newpop[i].chrom[j] = gene1 * self.params[2] + \
                        (1 - self.params[2]) * gene2
                    newpop[i + 1].chrom[j] = gene2 * self.params[2] + \
                        (1 - self.params[2]) * gene1
        self.population = newpop

    def mutationOperation(self):
        '''
        non-uniform mutation of a single randomly chosen position
        '''
        newpop = []
        for i in range(0, self.sizepop):
            newpop.append(copy.deepcopy(self.population[i]))
            r = random.random()
            if r < self.params[1]:
                mutatePos = random.randint(0, self.vardim - 1)
                theta = random.random()
                # the mutation step shrinks as t approaches MAXGEN
                if theta > 0.5:
                    newpop[i].chrom[mutatePos] = newpop[i].chrom[mutatePos] - \
                        (newpop[i].chrom[mutatePos] - self.bound[0, mutatePos]) * \
                        (1 - random.random() ** (1 - self.t / self.MAXGEN))
                else:
                    newpop[i].chrom[mutatePos] = newpop[i].chrom[mutatePos] + \
                        (self.bound[1, mutatePos] - newpop[i].chrom[mutatePos]) * \
                        (1 - random.random() ** (1 - self.t / self.MAXGEN))
        self.population = newpop

    def printResult(self):
        '''
        plot the result of the genetic algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Genetic algorithm for function optimization")
        plt.legend()
        plt.show()
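The params argument is the list [crossover rate, mutation rate, alpha] used by crossoverOperation and mutationOperation. The crossover is an arithmetic blend: from a random cut point onward, each child gene is alpha times one parent's gene plus (1 - alpha) times the other's. An isolated sketch of that blend, with toy numbers rather than values taken from the program:

# arithmetic crossover blend on a single gene (toy values, for illustration only)
alpha = 0.7
p1, p2 = 2.0, 6.0
child1 = alpha * p1 + (1 - alpha) * p2   # 0.7*2 + 0.3*6 ≈ 3.2
child2 = alpha * p2 + (1 - alpha) * p1   # 0.7*6 + 0.3*2 ≈ 4.8
print(child1, child2)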

Run the program:

import numpy as np

from GeneticAlgorithm import GeneticAlgorithm

if __name__ == "__main__":
    # 25 variables, each bounded in [-600, 600]
    bound = np.tile([[-600], [600]], 25)
    # population of 60, 1000 generations, crossover 0.9, mutation 0.1, alpha 0.5
    ga = GeneticAlgorithm(60, 25, bound, 1000, [0.9, 0.1, 0.5])
    ga.solve()
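The np.tile call builds the 2 x 25 bound array that GAIndividual.generate indexes as self.bound[0, i] (lower bounds) and self.bound[1, i] (upper bounds). A quick way to confirm the layout (not part of the original script):

import numpy as np

bound = np.tile([[-600], [600]], 25)
print(bound.shape)               # (2, 25)
print(bound[0, 0], bound[1, 0])  # -600 600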

Author: Alex Yu. Source: http://www.cnblogs.com/biaoyu/

The above is the complete implementation of a simple genetic algorithm in Python.

Original article: https://www.cnblogs.com/biaoyu/p/4857881.html