Particle Swarm Optimization (PSO) Code in C, with Explanations


Particle Swarm Optimization (PSO) Program (C#)

A very compact, randomized particle swarm program in C#:

using System;
using System.Linq;
using System.Collections.Generic;

class MyPSO {
    const int NUM = 40;            // number of particles
    const int DIM = 30;            // dimensions
    const double c1 = 1.8;         // learning factor
    const double c2 = 1.8;         // learning factor
    static double xmin = -100.0;   // lower position bound
    static double xmax = 100.0;    // upper position bound
    static double[] gbestx = new double[DIM];   // global best position
    static double gbestf;                        // global best fitness
    static Random rand = new Random();           // random number generator

    class particle {                             // one particle
        public double[] x = new double[DIM];     // current position vector
        public double[] bestx = new double[DIM]; // personal best position
        public double f;                         // current fitness
        public double bestf;                     // personal best fitness
    }

    static particle[] swarm = new particle[NUM]; // the swarm

    static double f1(double[] x) {               // test function: sphere
        return x.Sum(a => a * a);
    }

    static void Main(string[] args) {
        for (int i = 0; i < DIM; i++)            // initialize the global best
            gbestx[i] = rand.NextDouble() * (xmax - xmin) + xmin;
        gbestf = double.MaxValue;

        for (int i = 0; i < NUM; i++) {          // initialize the swarm
            particle p1 = new particle();
            for (int j = 0; j < DIM; j++)
                p1.x[j] = rand.NextDouble() * (xmax - xmin) + xmin;
            p1.f = f1(p1.x);
            p1.bestf = double.MaxValue;
            swarm[i] = p1;
        }

        for (int t = 0; t < 5000; t++) {
            for (int i = 0; i < NUM; i++) {
                particle p1 = swarm[i];
                for (int j = 0; j < DIM; j++)    // update equation (no velocity term in this variant)
                    p1.x[j] += c1 * rand.NextDouble() * (p1.bestx[j] - p1.x[j])
                             + c2 * rand.NextDouble() * (gbestx[j] - p1.x[j]);
                p1.f = f1(p1.x);
                if (p1.f < p1.bestf) {           // update the personal best
                    p1.x.CopyTo(p1.bestx, 0);
                    p1.bestf = p1.f;
                }
                if (p1.f < gbestf) {             // update the global best
                    p1.x.CopyTo(gbestx, 0);
                    for (int j = 0; j < DIM; j++)  // re-seed the current global-best particle at a random position
                        p1.x[j] = rand.NextDouble() * (xmax - xmin) + xmin;
                    gbestf = p1.f;
                }
            }
        }
        Console.WriteLine("{0}", gbestf);
    }
}
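For comparison, the velocity-based update that the other listings in this collection implement is the standard textbook formulation (it is not part of the C# listing above, which deliberately drops the velocity and inertia term):

$$v_{ij} \leftarrow w\,v_{ij} + c_1 r_1\,(p_{ij} - x_{ij}) + c_2 r_2\,(g_j - x_{ij}), \qquad x_{ij} \leftarrow x_{ij} + v_{ij},$$

where $p_i$ is particle $i$'s personal best position, $g$ is the global best position, and $r_1, r_2$ are uniform random numbers in $[0, 1]$.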

Particle Swarm Optimization: a Concise C Version

The C code for the particle swarm optimization algorithm is given below. Why does compiling it produce the following error? d:\program files\microsoft visual studio\vc98\include\eh.h(32) : fatal error C1189: #error : "eh.h is only for C++!" I am puzzled.

I have been working in MATLAB and have not touched C for a long time, so I am a bit rusty.

Any guidance would be much appreciated, thank you!

#include <iostream>
#include <fstream>
#include <cmath>
#include <ctime>
#include <cstdio>
#include <string>
#include <iomanip>
#include <cstdlib>
using namespace std;
#include "ran_number.h"

const double pi = 3.1415926;
const int num = 60;        // swarm size
const int T = 2000;        // maximum number of iterations

//************** fitness function *************//
double func(double x)
{
    double f = x * sin(10 * x * pi) + 2.0;
    f = 1.0 / f;
    return f;
}

//---------------- main program ----------------//
int main(int argc, char* argv[])
{
    double ran;                       // random number
    RandomNumber rand;                // generator from ran_number.h
    double c1 = 2.05, c2 = 2.05;      // learning factors
    double d1 = 0.2, d2 = 0.7;        // control factors
    double ws = 0.9, we = 0.4;        // maximum / minimum inertia weight
    double x_down = -1.0, x_up = 2.0; // search-space bounds
    double w;                         // inertia weight
    int m = 2;
    double Vmax;                      // maximum particle velocity
    double x[num];                    // particle positions (control parameters)
    double v[num];                    // particle velocities
    double g[num];                    // particle fitness
    double pbest[num];                // personal best values
    double pbest_x[num];              // personal best positions
    double gbest;                     // global best value
    double gbest_x;                   // global best position

    Vmax = (x_up - x_down) / 8;

    //-- initialize positions, velocities and personal bests
    for (int i = 0; i < num; i++)
    {
        pbest[i] = 0.0;
        ran = rand.fRandom();
        x[i] = x_down + (x_up - x_down) * ran;
        v[i] = Vmax * (2 * ran - 1);      // initial velocity
        pbest[i] = func(x[i]);
        pbest_x[i] = x[i];
    }
    gbest = pbest[0];
    gbest_x = x[0];

    //-------------- global best after initialization --------------//
    for (int i = 0; i < num; i++)     // loop over particles
    {
        if (pbest[i] < gbest)
        {
            gbest = pbest[i];
            gbest_x = pbest_x[i];
        }
    }

    //---------------- the T iterations ----------------//
    while (m <= T)                    // iteration loop
    {
        w = (ws - we - d1) * exp(1.0 / (1 + d2 * (m - 1) / T));   // inertia weight
        for (int i = 0; i < num; i++) // particle loop
        {
            g[i] = 0.0;
            ran = rand.fRandom();
            v[i] = w * v[i] + c1 * ran * (pbest_x[i] - x[i]) + c2 * ran * (gbest_x - x[i]);
            x[i] += v[i];
            if (x[i] < x_down) { x[i] = x_down; }   // keep the position above the lower bound
            if (x[i] > x_up)   { x[i] = x_up;   }   // keep the position below the upper bound
            g[i] = func(x[i]);
            if (pbest[i] > g[i])  { pbest[i] = g[i]; pbest_x[i] = x[i]; }
            if (pbest[i] < gbest) { gbest = pbest[i]; gbest_x = pbest_x[i]; }
            printf("Iteration %d: swarm best value %lf at position %lf\n", m, gbest, gbest_x);
        }
        m = m + 1;
    }
    printf("Global best value %lf at position %lf\n", gbest, gbest_x);
    return 0;
}

Here is the fix that was found. Note: VC compiles a .c file as C, so given a file named 1.c containing:

#include <fstream>
int main(int argc, char* argv[])
{
    return 0;
}

compilation fails with: fatal error C1189: #error : "eh.h is only for C++!". The fstream standard library pulls in eh.h, and the exception-handling machinery it relies on requires C++ support.
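A minimal sketch of the C-only alternative, assuming you want to keep the .c extension rather than rename the file to .cpp: include only C standard headers, so the compiler never pulls in eh.h (the file name below is illustrative).

/* pso_min.c - a translation unit that compiles cleanly as C */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>

int main(void)
{
    printf("compiled as plain C, no C++ exception-handling headers involved\n");
    return 0;
}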

A PSO Program in C or MATLAB for Minimizing the Mexican Hat Function

This program uses particle swarm optimization to find the minimum of the Mexican hat (sombrero) function.

The Mexican hat function is a commonly used test function with the expression: f(x, y) = -sin(sqrt(x^2 + y^2)) / (0.01*(x^2 + y^2) + 1). Starting from a randomly generated initial swarm, the program repeatedly updates particle positions and velocities until it locates the function's global minimum (a small C sketch of the fitness function is given after the steps below).

The implementation proceeds as follows:
1. Define the Mexican hat function, taking x and y as inputs and returning f.
2. Define the initial swarm: the number of particles and their initial positions and velocities.
3. Iteratively update particle positions and velocities until a stopping condition is met (for example, reaching a fixed number of iterations, or the gap between the swarm's best solution and the global optimum dropping below a tolerance).
4. During the iterations, record each particle's best position and the global best position, and update the swarm's best solution accordingly.
5. Finally, output the minimum of the Mexican hat function and the corresponding solution.

The program can be written in either C or MATLAB; the choice comes down to personal preference and circumstances.
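A minimal sketch of step 1 in C, using the formula quoted above (the function name and signature are illustrative, not taken from any specific listing in this collection):

#include <math.h>

/* Mexican hat test function: f(x, y) = -sin(sqrt(x^2 + y^2)) / (0.01*(x^2 + y^2) + 1) */
double mexican_hat(double x, double y)
{
    double r2 = x * x + y * y;
    return -sin(sqrt(r2)) / (0.01 * r2 + 1.0);
}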


Particle Swarm Optimization in C++

#include <iostream>
#include <cmath>
#include <ctime>
#include <cstdlib>
using namespace std;

const int pNum = 10;          // number of particles
const int generation = 100;   // number of iterations
const int dim = 2;            // dimensions
const double low = -10;       // lower position bound
const double high = 10;       // upper position bound
const double vMax = 10;       // velocity limit
const double w = 0.5;         // inertia weight
const double c1 = 2;          // cognitive learning factor
const double c2 = 2;          // social learning factor

double p[pNum][dim];          // particle positions
double pv[pNum][dim];         // velocity vectors
double pBest[pNum][dim];      // each particle's personal best position
double pFitness[pNum];        // each particle's best fitness value
double gFitness;              // global best fitness
double gBest[dim];            // global best position

double fitness(double p[])
{
    return p[0] * p[0] + p[1] * p[1];   // change this objective to whatever you need
}

void initialize()
{
    for (int i = 0; i < pNum; i++)
    {
        for (int j = 0; j < dim; j++)
        {
            p[i][j] = low + (high - low) * 1.0 * rand() / RAND_MAX;
            pBest[i][j] = p[i][j];
            pv[i][j] = -vMax + 2.0 * vMax * rand() / RAND_MAX;
        }
    }
    for (int i = 0; i < pNum; i++)
        pFitness[i] = fitness(p[i]);
    gFitness = pFitness[0];
    for (int i = 0; i < dim; i++)
        gBest[i] = pBest[0][i];
    for (int i = 1; i < pNum; i++)
    {
        if (gFitness > pFitness[i])
        {
            gFitness = pFitness[i];
            for (int j = 0; j < dim; j++)
                gBest[j] = pBest[i][j];
        }
    }
}

void update()
{
    for (int i = 0; i < pNum; i++)
    {
        for (int j = 0; j < dim; j++)
        {
            pv[i][j] = w * pv[i][j]
                     + c1 * rand() / RAND_MAX * (pBest[i][j] - p[i][j])
                     + c2 * rand() / RAND_MAX * (gBest[j] - p[i][j]);
            if (pv[i][j] < -vMax) pv[i][j] = -vMax;   // clamp the velocity
            if (pv[i][j] > vMax)  pv[i][j] = vMax;
            p[i][j] = p[i][j] + pv[i][j];
            if (p[i][j] > high) p[i][j] = high;       // clamp the position
            if (p[i][j] < low)  p[i][j] = low;
        }
    }
}

void get_pBest()
{
    for (int i = 0; i < pNum; i++)
    {
        if (pFitness[i] > fitness(p[i]))
        {
            pFitness[i] = fitness(p[i]);
            for (int j = 0; j < dim; j++)
                pBest[i][j] = p[i][j];
        }
    }
}

void get_gBest()
{
    for (int i = 0; i < pNum; i++)
    {
        if (pFitness[i] < gFitness)
        {
            for (int j = 0; j < dim; j++)
                gBest[j] = pBest[i][j];
            gFitness = pFitness[i];
        }
    }
}

int main()
{
    srand(time(0));
    initialize();
    for (int n = 0; n < generation; n++)
    {
        update();
        get_pBest();
        get_gBest();
    }
    cout << "gBest is (";
    for (int i = 0; i < dim - 1; i++)
        cout << gBest[i] << ",";
    cout << gBest[dim - 1] << ")" << endl;
    cout << "the best fitness is " << gFitness << endl;
    return 0;
}
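A brief usage note: with the sphere objective used in fitness(), the true minimum is at the origin, so after the 100 generations above gBest should come out close to (0, 0) with a fitness near zero. The listing only uses the standard library, so an ordinary C++ compiler invocation (for example g++ with no extra flags) is enough to build it.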

A PSO Program in C or MATLAB for Minimizing the Mexican Hat Function

Below is one basic particle swarm optimization program, written in C, for finding the minimum of the Mexican hat function.

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <float.h>

#define SWARM_SIZE 50         // swarm size
#define MAX_ITERATION 1000    // maximum iterations
#define DIMENSION 2           // problem dimension
#define INERTIA_WEIGHT 0.7    // inertia weight
#define COGNITIVE_WEIGHT 1.4  // cognitive weight
#define SOCIAL_WEIGHT 1.4     // social weight
#define LOWER_BOUND -10.0     // lower variable bound
#define UPPER_BOUND 10.0      // upper variable bound

void evaluateFitness(double* positions, double* fitness);
double mexicanHatFunction(double* x);
double randDouble(double min, double max);
int randInt(int min, int max);

int main(void)
{
    double swarm[SWARM_SIZE][DIMENSION];     // particle positions
    double velocity[SWARM_SIZE][DIMENSION];  // particle velocities
    double bestPosition[DIMENSION];          // global best position
    double bestFitness = DBL_MAX;            // global best fitness
    double fitness[SWARM_SIZE];              // particle fitness

    // initialize the swarm
    for (int i = 0; i < SWARM_SIZE; i++)
    {
        for (int j = 0; j < DIMENSION; j++)
        {
            swarm[i][j] = randDouble(LOWER_BOUND, UPPER_BOUND);
            velocity[i][j] = 0.0;
        }
        evaluateFitness(swarm[i], &fitness[i]);
        if (fitness[i] < bestFitness)
        {
            bestFitness = fitness[i];
            for (int k = 0; k < DIMENSION; k++)
                bestPosition[k] = swarm[i][k];
        }
    }

    // iterate
    for (int iter = 0; iter < MAX_ITERATION; iter++)
    {
        for (int i = 0; i < SWARM_SIZE; i++)
        {
            for (int j = 0; j < DIMENSION; j++)
            {
                double r1 = randDouble(0.0, 1.0);
                double r2 = randDouble(0.0, 1.0);
                // note: as written, both attraction terms pull toward the global best position
                velocity[i][j] = INERTIA_WEIGHT * velocity[i][j]
                               + COGNITIVE_WEIGHT * r1 * (bestPosition[j] - swarm[i][j])
                               + SOCIAL_WEIGHT * r2 * (bestPosition[j] - swarm[i][j]);
                swarm[i][j] += velocity[i][j];
                if (swarm[i][j] < LOWER_BOUND)
                    swarm[i][j] = LOWER_BOUND;
                else if (swarm[i][j] > UPPER_BOUND)
                    swarm[i][j] = UPPER_BOUND;
            }
            evaluateFitness(swarm[i], &fitness[i]);
            if (fitness[i] < bestFitness)
            {
                bestFitness = fitness[i];
                for (int k = 0; k < DIMENSION; k++)
                    bestPosition[k] = swarm[i][k];
            }
        }
    }

    printf("Best position: ");
    for (int i = 0; i < DIMENSION; i++)
        printf("%.4f ", bestPosition[i]);
    printf("\n");
    printf("Best fitness: %f\n", bestFitness);
    return 0;
}

void evaluateFitness(double* positions, double* fitness)
{
    *fitness = mexicanHatFunction(positions);
}

double mexicanHatFunction(double* x)
{
    double result = 0.0;
    result = -(pow(x[0], 2) + pow(x[1], 2)) + 2 * exp(-(pow(x[0], 2) + pow(x[1], 2)) / 2);
    return result;
}

double randDouble(double min, double max)
{
    return min + ((double)rand() / RAND_MAX) * (max - min);
}

int randInt(int min, int max)
{
    return min + rand() % (max - min + 1);
}

This program uses a basic particle swarm scheme to search for the minimum of the Mexican hat function.
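A usage note, assuming a GCC-style toolchain: a command along the lines of gcc mexican_hat_pso.c -o mexican_hat_pso -lm builds the listing (the file name is illustrative); the -lm flag links the math library needed for exp and pow.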

Six Particle Swarm Algorithm Programs

Program 1: c11 = c21 = 2, c12 = c22 = 1.5, w = 1.2.

a) Main program (main.m):

%------ Basic particle swarm optimization ------
%------ Name: basic PSO
%------ housekeeping
clear all;       % clear all variables
clc;             % clear the screen
format long;     % long scientific number display

%------ initial conditions ------
N = 40;          % swarm size
D = 10;          % dimensions
T = 100;         % maximum number of iterations
c11 = 2;         % learning factor 1 (first run)
c21 = 2;         % learning factor 2 (first run)
c12 = 1.5;       % learning factor 1 (second run)
c22 = 1.5;       % learning factor 2 (second run)
w = 1.2;         % inertia weight
eps = 10^(-6);   % tolerance (used when the minimum is known)

%------ initialize positions and velocities ------
x = zeros(N, D);
v = zeros(N, D);
for i = 1:N
    for j = 1:D
        x(i, j) = randn;   % random initial position
        v(i, j) = randn;   % random initial velocity
    end
end

%------ plot the initial positions ------
figure(1)
for j = 1:D
    if (rem(D, 2) > 0)
        subplot((D + 1)/2, 2, j)
    else
        subplot(D/2, 2, j)
    end
    plot(x(:, j), 'b*'); grid on
    xlabel('particle')
    ylabel('initial position')
    tInfo = strcat('dimension ', char(j + 48));
    if (j > 9)
        tInfo = strcat('dimension ', char(floor(j/10) + 48), char(rem(j, 10) + 48));
    end
    title(tInfo)
end

%------ plot the initial velocities ------
figure(2)
for j = 1:D
    if (rem(D, 2) > 0)
        subplot((D + 1)/2, 2, j)
    else
        subplot(D/2, 2, j)
    end
    plot(x(:, j), 'b*'); grid on
    xlabel('particle')
    ylabel('initial velocity')
    tInfo = strcat('dimension ', char(j + 48));
    if (j > 9)
        tInfo = strcat('dimension ', char(floor(j/10) + 48), char(rem(j, 10) + 48));
    end
    title(tInfo)
end

figure(3)
% first panel: run with c11, c21
subplot(1, 2, 1)
x1 = x;  v1 = v;                 % copy the common initial swarm
p1 = x1;  pbest1 = ones(N, 1);   % personal best positions and values
for i = 1:N
    pbest1(i) = fitness(x1(i, :), D);
end
g1 = 1000 * ones(1, D);          % global best position and value
gbest1 = 1000;
for i = 1:N
    if (pbest1(i) < gbest1)
        g1 = p1(i, :);
        gbest1 = pbest1(i);
    end
end
gb1 = ones(1, T);
% main loop: iterate until the tolerance or the iteration limit is reached
for i = 1:T
    for j = 1:N
        if (fitness(x1(j, :), D) < pbest1(j))
            p1(j, :) = x1(j, :);
            pbest1(j) = fitness(x1(j, :), D);
        end
        if (pbest1(j) < gbest1)
            g1 = p1(j, :);
            gbest1 = pbest1(j);
        end
        v1(j, :) = w*v1(j, :) + c11*rand*(p1(j, :) - x1(j, :)) + c21*rand*(g1 - x1(j, :));
        x1(j, :) = x1(j, :) + v1(j, :);
    end
    gb1(i) = gbest1;
end
plot(gb1)
TempStr = sprintf('c1 = %g, c2 = %g', c11, c21);
title(TempStr);
xlabel('iteration');
ylabel('fitness');

% second panel: identical run with c12, c22
subplot(1, 2, 2)
x2 = x;  v2 = v;
p2 = x2;  pbest2 = ones(N, 1);
for i = 1:N
    pbest2(i) = fitness(x2(i, :), D);
end
g2 = 1000 * ones(1, D);
gbest2 = 1000;
for i = 1:N
    if (pbest2(i) < gbest2)
        g2 = p2(i, :);
        gbest2 = pbest2(i);
    end
end
gb2 = ones(1, T);
for i = 1:T
    for j = 1:N
        if (fitness(x2(j, :), D) < pbest2(j))
            p2(j, :) = x2(j, :);
            pbest2(j) = fitness(x2(j, :), D);
        end
        if (pbest2(j) < gbest2)
            g2 = p2(j, :);
            gbest2 = pbest2(j);
        end
        v2(j, :) = w*v2(j, :) + c12*rand*(p2(j, :) - x2(j, :)) + c22*rand*(g2 - x2(j, :));
        x2(j, :) = x2(j, :) + v2(j, :);
    end
    gb2(i) = gbest2;
end
plot(gb2)
TempStr = sprintf('c1 = %g, c2 = %g', c12, c22);
title(TempStr);
xlabel('iteration');
ylabel('fitness');

b) Fitness function (fitness.m):

function result = fitness(x, D)
sum = 0;
for i = 1:D
    sum = sum + x(i)^2;
end
result = sum;

Program 2 (c11 = c21 = 2 compared against c12 = 0, c22 = 2, with w = 1.2) and Program 3 (c11 = c21 = 2, w = 1.2 compared against c12 = 2, c22 = 0, w = 1.2) use exactly the same main.m and fitness.m as Program 1; only the learning-factor constants are changed to the values listed. Program 4 varies the learning factors and the inertia weight, testing c1 = 1.1, c2 = 2, w1 = 1.2, w2 = 1.5 on the same test function.

Basic Particle Swarm Optimization

A simple, basic particle swarm optimizer:

#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>

#define randf ((double)(rand() % 10000) / 10000)

const int dim = 30;        // particle dimension
const int num = 100;       // number of particles in the swarm
const int c1 = 2;          // learning factor 1
const int c2 = 2;          // learning factor 2
const double w = 0.9;      // inertia weight
double Xmax = 500;
double Xmin = -500;
double Vmax = 50;
double Vmin = -50;
double gbest_f;            // global best fitness
double gbest[dim];         // global best position

struct particle            // one particle
{
    double f;
    double pbest_f;
    double pbest[dim];
    double X[dim];
    double V[dim];
} swarm[num];              // the swarm

double fitness(double X[]);   // compute a particle's fitness
void check(int index);        // clamp a particle's position and velocity to their bounds
void initial();               // random initialization of the swarm
void fly();                   // one PSO step

double fitness(double X[])
{
    double sum = 0;
    for (int i = 0; i < dim; i++)
    {
        double t = X[i];
        if (t < 0)
            t *= -1;
        sum += -1 * X[i] * sin(sqrt(t));
    }
    return sum;
}

void check(int index)
{
    for (int i = 0; i < dim; i++)
    {
        if (swarm[index].X[i] > Xmax) swarm[index].X[i] = Xmax;
        if (swarm[index].X[i] < Xmin) swarm[index].X[i] = Xmin;
        if (swarm[index].V[i] > Vmax) swarm[index].V[i] = Vmax;
        if (swarm[index].V[i] < Vmin) swarm[index].V[i] = Vmin;
    }
}

void initial()
{
    double interval0 = Xmax - Xmin;
    double interval1 = Vmax - Vmin;
    for (int i = 0; i < num; i++)
    {
        particle *p = &swarm[i];
        for (int j = 0; j < dim; j++)
        {
            p->X[j] = interval0 * randf + Xmin;
            p->V[j] = interval1 * randf + Vmin;
        }
        check(i);
        p->f = fitness(p->X);
        p->pbest_f = p->f;
        for (int j = 0; j < dim; j++)
            p->pbest[j] = p->X[j];
    }
    gbest_f = swarm[0].pbest_f;
    for (int j = 0; j < dim; j++)
        gbest[j] = swarm[0].X[j];
}

void fly()
{
    for (int i = 0; i < num; i++)
    {
        particle *p = &swarm[i];
        for (int j = 0; j < dim; j++)
        {
            p->V[j] = w * p->V[j] + c1 * randf * (p->pbest[j] - p->X[j]) + c2 * randf * (gbest[j] - p->X[j]);
            p->X[j] = p->V[j] + p->X[j];
        }
        check(i);
        p->f = fitness(p->X);
        if (p->f < p->pbest_f)            // update the personal best
        {
            p->pbest_f = p->f;
            for (int j = 0; j < dim; j++)
                p->pbest[j] = p->X[j];
        }
        if (p->f < gbest_f)               // update the global best
        {
            gbest_f = p->f;
            for (int j = 0; j < dim; j++)
                gbest[j] = p->X[j];
        }
    }
}

int main()
{
    srand((unsigned)time(NULL));
    initial();
    for (int i = 0; i < 50000; i++)
        fly();
    double a = fitness(gbest);
    printf("%lf", a);
    return 0;
}
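For reference, the objective coded in fitness() above is the following sum (identifying it as a Schwefel-type test function is an inference from the code, not a statement by the original author):

$$f(\mathbf{x}) = \sum_{i=1}^{30} -\,x_i \sin\!\left(\sqrt{|x_i|}\right), \qquad -500 \le x_i \le 500.$$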

Particle Swarm Optimization (PSO) Source Code (MATLAB)

%%####################################################################
%%#### Particle swarm optimization
%%#### With linkage operator
%%#### Deepak devicharan july 2003
%%####################################################################
%%## to apply this to different equations do the following
%%## generate initial particles in a search space close to actual soln
%%## fool around with no of iterations, no of particles, learning rates
%%## for a truly generic PSO do the following
%%## increase the number of particles, increase the variance
%%## i.e let the particles cover a larger area of the search space
%%## then fool around as always with the above things

% declare the parameters of the optimization
max_iterations = 1000;
no_of_particles = 50;
dimensions = 1;
delta_min = -0.003;
delta_max = 0.003;
c1 = 1.3;
c2 = 1.3;

% initialise the particles and their velocity components
for count_x = 1:no_of_particles
    for count_y = 1:dimensions
        particle_position(count_x, count_y) = rand*10;
        particle_velocity(count_x, count_y) = rand;
        p_best(count_x, count_y) = particle_position(count_x, count_y);
    end
end

% initialize the p_best_fitness array
for count = 1:no_of_particles
    p_best_fitness(count) = -1000;
end

% main particle swarm routine
for count = 1:max_iterations
    % find the fitness of each particle
    % change fitness function as per equation required and dimensions
    for count_x = 1:no_of_particles
        % x = particle_position(count_x,1);
        % y = particle_position(count_x,2);
        % z = particle_position(count_x,3);
        % soln = x^2 - 3*y*x + z;
        % x = particle_position(count_x);
        % soln = x^2 - 2*x + 1;
        x = particle_position(count_x);
        soln = x - 7;
        if soln ~= 0
            current_fitness(count_x) = 1/abs(soln);
        else
            current_fitness(count_x) = 1000;
        end
    end

    % decide on p_best etc for each particle
    for count_x = 1:no_of_particles
        if current_fitness(count_x) > p_best_fitness(count_x)
            p_best_fitness(count_x) = current_fitness(count_x);
            for count_y = 1:dimensions
                p_best(count_x, count_y) = particle_position(count_x, count_y);
            end
        end
    end

    % decide on the global best among all the particles
    [g_best_val, g_best_index] = max(current_fitness);

    % g_best contains the position of the global best
    for count_y = 1:dimensions
        g_best(count_y) = particle_position(g_best_index, count_y);
    end

    % update the position and velocity components
    for count_x = 1:no_of_particles
        for count_y = 1:dimensions
            p_current(count_y) = particle_position(count_x, count_y);
        end
        for count_y = 1:dimensions
            particle_velocity(count_x, count_y) = particle_velocity(count_x, count_y) ...
                + c1*rand*(p_best(count_x, count_y) - p_current(count_y)) ...
                + c2*rand*(g_best(count_y) - p_current(count_y));
            particle_position(count_x, count_y) = p_current(count_y) + particle_velocity(count_x, count_y);
        end
    end
end
g_best
current_fitness(g_best_index)

clear all, clc                     % pso example
iter = 1000;                       % number of algorithm iterations
np = 2;                            % number of model parameters
ns = 10;                           % number of sets of model parameters
Wmax = 0.9;                        % maximum inertial weight
Wmin = 0.4;                        % minimum inertial weight
c1 = 2.0;                          % parameter in PSO methodology
c2 = 2.0;                          % parameter in PSO methodology
Pmax = [10 10];                    % maximum model parameter value
Pmin = [-10 -10];                  % minimum model parameter value
Vmax = [1 1];                      % maximum change in model parameter
Vmin = [-1 -1];                    % minimum change in model parameter

modelparameters(1:np, 1:ns) = 0;        % set all model parameter estimates for all sets to zero
modelparameterchanges(1:np, 1:ns) = 0;  % set all changes in model parameter estimates to zero
bestmodelparameters(1:np, 1:ns) = 0;    % set best model parameter estimates for all sets to zero
setbestcostfunction(1:ns) = 1e6;        % set the best cost function of each set to a large number
globalbestparameters(1:np) = 0;         % set the best model parameter values over all sets to zero
bestparameters = globalbestparameters'; % best model parameter values over all sets (to plot)
globalbestcostfunction = 1e6;           % set the best cost function over all sets to a large number
i = 0;                                  % indicates ith algorithm iteration
j = 0;                                  % indicates jth set of model parameters
k = 0;                                  % indicates kth model parameter

for k = 1:np                            % initialization
    for j = 1:ns
        modelparameters(k, j) = (Pmax(k) - Pmin(k))*rand(1) + Pmin(k);        % randomly distribute model parameters
        modelparameterchanges(k, j) = (Vmax(k) - Vmin(k))*rand(1) + Vmin(k);  % randomly distribute parameter changes
    end
end

for i = 2:iter
    for j = 1:ns
        x = modelparameters(:, j);
        % calculate cost function
        costfunction = 105*(x(2) - x(1)^2)^2 + (1 - x(1))^2;
        if costfunction < setbestcostfunction(j)     % best cost function for jth set of model parameters
            bestmodelparameters(:, j) = modelparameters(:, j);
            setbestcostfunction(j) = costfunction;
        end
        if costfunction < globalbestcostfunction     % best cost function over all sets of model parameters
            globalbestparameters = modelparameters(:, j);
            bestparameters(:, i) = globalbestparameters;
            globalbestcostfunction(i) = costfunction;
        else
            bestparameters(:, i) = bestparameters(:, i-1);
            globalbestcostfunction(i) = globalbestcostfunction(i-1);
        end
    end
    W = Wmax - i*(Wmax - Wmin)/iter;                 % compute inertial weight
    for j = 1:ns        % update change in model parameters and model parameters
        for k = 1:np
            modelparameterchanges(k, j) = W*modelparameterchanges(k, j) ...
                + c1*rand(1)*(bestmodelparameters(k, j) - modelparameters(k, j)) ...
                + c2*rand(1)*(globalbestparameters(k) - modelparameters(k, j));
            if modelparameterchanges(k, j) < -Vmax(k), modelparameters(k, j) = modelparameters(k, j) - Vmax(k); end
            if modelparameterchanges(k, j) > Vmax(k),  modelparameters(k, j) = modelparameters(k, j) + Vmax(k); end
            if modelparameterchanges(k, j) > -Vmax(k) & modelparameterchanges(k, j) < Vmax(k)
                modelparameters(k, j) = modelparameters(k, j) + modelparameterchanges(k, j);
            end
            if modelparameters(k, j) < Pmin(k), modelparameters(k, j) = Pmin(k); end
            if modelparameters(k, j) > Pmax(k), modelparameters(k, j) = Pmax(k); end
        end
    end
    i
end

bp = bestparameters;
index = linspace(1, iter, iter);
figure; semilogy(globalbestcostfunction, 'k');
set(gca, 'FontName', 'Arial', 'Fontsize', 14); axis tight;
xlabel('iteration'); ylabel('cost function');
figure; q = plot(index, bp(1,:), 'k-', index, bp(2,:), 'k:');
set(gca, 'FontName', 'Arial', 'Fontsize', 14); axis tight;
legend(q, 'x_1', 'x_2'); xlabel('iteration'); ylabel('parameter')
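For reference, the cost function minimized by the second script is a Rosenbrock-type "banana" function, here with a 105 coefficient rather than the textbook 100:

$$J(x_1, x_2) = 105\,(x_2 - x_1^2)^2 + (1 - x_1)^2.$$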

// Particle swarm optimization (PSO)
#include <stdio.h>
#include <math.h>
#include <time.h>
#include <stdlib.h>

#define PI 3.141592653589
#define P_num 200        // number of particles
#define dim 50
#define low -100         // search-space bounds
#define high 100
#define iter_num 1000
#define V_max 20         // velocity range
#define c1 2
#define c2 2
#define w 0.5
#define alp 1

double particle[P_num][dim];          // the swarm
double particle_loc_best[P_num][dim]; // each particle's personal best position
double particle_loc_fit[P_num];       // personal best fitness, computed from the personal best position
double particle_glo_best[dim];        // global best position
double gfit;                          // global best fitness, computed from the global best position
double particle_v[P_num][dim];        // each particle's current velocity vector
double particle_fit[P_num];           // each particle's current fitness

double Sphere(double a[])
{
    int i;
    double sum = 0.0;
    for (i = 0; i < dim; i++)
    {
        sum += a[i] * a[i];
    }
    return sum;
}

double Rosenbrock(double a[])
{
    int i;
    double sum = 0.0;
    for (i = 0; i < dim - 1; i++)
    {
        sum += 100 * (a[i+1] - a[i]*a[i]) * (a[i+1] - a[i]*a[i]) + (a[i] - 1) * (a[i] - 1);
    }
    return sum;
}

double Rastrigin(double a[])
{
    int i;
    double sum = 0.0;
    for (i = 0; i < dim; i++)
    {
        sum += a[i]*a[i] - 10.0*cos(2*PI*a[i]) + 10.0;
    }
    return sum;
}

double fitness(double a[])   // fitness function
{
    return Rastrigin(a);
}

void initial()
{
    int i, j;
    for (i = 0; i < P_num; i++)        // randomly generate the particles
    {
        for (j = 0; j < dim; j++)
        {
            particle[i][j] = low + (high - low) * 1.0 * rand() / RAND_MAX;    // initialize the position
            particle_loc_best[i][j] = particle[i][j];                         // current position is the initial personal best
            particle_v[i][j] = -V_max + 2 * V_max * 1.0 * rand() / RAND_MAX;  // initialize the velocity
        }
    }
    for (i = 0; i < P_num; i++)        // compute each particle's fitness
    {
        particle_fit[i] = fitness(particle[i]);
        particle_loc_fit[i] = particle_fit[i];
    }
    gfit = particle_loc_fit[0];        // find the global best
    j = 0;
    for (i = 1; i < P_num; i++)
    {
        if (particle_loc_fit[i] < gfit)
        {
            gfit = particle_loc_fit[i];
            j = i;
        }
    }
    for (i = 0; i < dim; i++)          // set the global best position
    {
        particle_glo_best[i] = particle_loc_best[j][i];
    }
}

void renew_particle()
{
    int i, j;
    for (i = 0; i < P_num; i++)        // update particle positions
    {
        for (j = 0; j < dim; j++)
        {
            particle[i][j] += alp * particle_v[i][j];
            if (particle[i][j] > high)
            {
                particle[i][j] = high;
            }
            if (particle[i][j] < low)
            {
                particle[i][j] = low;
            }
        }
    }
}

void renew_var()
{
    int i, j;
    for (i = 0; i < P_num; i++)        // compute each particle's fitness
    {
        particle_fit[i] = fitness(particle[i]);
        if (particle_fit[i] < particle_loc_fit[i])   // update the personal best fitness
        {
            particle_loc_fit[i] = particle_fit[i];
            for (j = 0; j < dim; j++)                // update the personal best position
            {
                particle_loc_best[i][j] = particle[i][j];
            }
        }
    }
    for (i = 0, j = -1; i < P_num; i++)   // update the global best fitness
    {
        if (particle_loc_fit[i] < gfit)
        {
            gfit = particle_loc_fit[i];
            j = i;
        }
    }
    if (j != -1)
    {
        for (i = 0; i < dim; i++)         // update the global best position
        {
            particle_glo_best[i] = particle_loc_best[j][i];
        }
    }
    for (i = 0; i < P_num; i++)           // update particle velocities
    {
        for (j = 0; j < dim; j++)
        {
            particle_v[i][j] = w * particle_v[i][j]
                + c1 * 1.0 * rand() / RAND_MAX * (particle_loc_best[i][j] - particle[i][j])
                + c2 * 1.0 * rand() / RAND_MAX * (particle_glo_best[j] - particle[i][j]);
            if (particle_v[i][j] > V_max)
            {
                particle_v[i][j] = V_max;
            }
            if (particle_v[i][j] < -V_max)
            {
                particle_v[i][j] = -V_max;
            }
        }
    }
}

int main()
{
    freopen("result.txt", "a+", stdout);
    int i = 0;
    srand((unsigned)time(NULL));
    initial();
    while (i < iter_num)
    {
        renew_particle();
        renew_var();
        i++;
    }
    printf("Number of particles: %d\n", P_num);
    printf("Dimension: %d\n", dim);
    printf("Best value: %.10lf\n", gfit);
    return 0;
}
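A brief usage note, assuming a GCC-style toolchain: building with something like gcc pso.c -o pso -lm and running the binary appends the summary lines to result.txt, because stdout is redirected via freopen at the top of main(). To optimize the Sphere or Rosenbrock test functions instead of Rastrigin, change the single return statement inside fitness().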