Combining MapReduce with Genetic Algorithms and with Particle Swarm Optimization: Design and Implementation
Genetic algorithms (a plain-language introduction): http://www.cnblogs.com/heaad/archive/2010/12/23/1914725.html
Java code that solves the 0-1 knapsack problem with a genetic algorithm: http://wenku.baidu.com/view/20beb6da6f1aff00bed51ea8.html
1. Combining MapReduce with the genetic algorithm (reference: MapReduce-GA-eScience2008)
Map — evaluate the fitness of each individual
  Input: (key, value) = (individual index, individual)
  Output: (key, value) = (a shared default key, individual)
Reduce — select the locally best individual
  Output: (key, value) = (selected individual, 1)
Reduce — select the globally best individual
  Output: (key, value) = (globally best individual, 1)
Coordinator — apply crossover and mutation to produce the next generation
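This structure maps directly onto Hadoop's Mapper/Reducer interfaces. The listing below is a minimal sketch of the fitness-evaluation Map step and the best-individual-selection Reduce step, not the code from the referenced paper: the class names (GaGenerationSketch, EvalMapper, BestReducer), the bit-string encoding and the count-the-ones fitness are placeholder assumptions, and selection, crossover and mutation would be carried out by the coordinator (driver) between successive jobs.

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Hypothetical sketch of one GA generation as a MapReduce job.
// Assumed input: one individual per line, encoded as a 0/1 bit string.
public class GaGenerationSketch {

    // Map: evaluate each individual's fitness and emit it under a single
    // shared key, so that one reduce group sees the whole (sub)population.
    public static class EvalMapper extends Mapper<LongWritable, Text, IntWritable, Text> {
        private static final IntWritable SHARED_KEY = new IntWritable(1);
        private final Text outValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String chromosome = value.toString().trim();
            if (chromosome.isEmpty()) {
                return;
            }
            // Toy fitness: number of 1-bits; replace with the real objective.
            int fit = 0;
            for (char c : chromosome.toCharArray()) {
                if (c == '1') {
                    fit++;
                }
            }
            // Emit "fitness<TAB>chromosome" so the reducer can compare individuals.
            outValue.set(fit + "\t" + chromosome);
            context.write(SHARED_KEY, outValue);
        }
    }

    // Reduce: scan the group and keep only the best individual, emitted as (individual, 1).
    public static class BestReducer extends Reducer<IntWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);

        @Override
        protected void reduce(IntWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String bestChromosome = null;
            double bestFitness = Double.NEGATIVE_INFINITY;
            for (Text val : values) {
                String[] parts = val.toString().split("\t");
                double fit = Double.parseDouble(parts[0]);
                if (fit > bestFitness) {
                    bestFitness = fit;
                    bestChromosome = parts[1];
                }
            }
            if (bestChromosome != null) {
                // The coordinator reads this best individual back and applies
                // selection, crossover and mutation before launching the next job.
                context.write(new Text(bestChromosome), ONE);
            }
        }
    }

    // Driver for a single generation; the coordinator would run this in a loop.
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "ga generation");
        job.setJarByClass(GaGenerationSketch.class);
        job.setMapperClass(EvalMapper.class);
        job.setReducerClass(BestReducer.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```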
2. Combining MapReduce with particle swarm optimization (reference: Parallel PSO Using MapReduce)
Map — update each particle and obtain its personal best solution
  Input: (key, value) = (particle index, particle state: neighbouring nodes, position coordinates, velocity, position value, personal best position, personal best value, global best position, global best value)
  Output: (key, value) = (best particle index, best particle state: same fields as above)
Reduce — obtain the global best solution
  Output: (key, value) = (global best particle index, global best particle state: same fields as above)
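For reference, the Map step applies the textbook PSO update that the code below implements (with the inertia weight and both acceleration coefficients set to 1):

$$v_{id} \leftarrow w\,v_{id} + c_1 r_1 \,(p_{id} - x_{id}) + c_2 r_2 \,(g_d - x_{id}), \qquad x_{id} \leftarrow x_{id} + v_{id},$$

where $x_i$ is particle $i$'s position, $v_i$ its velocity, $p_i$ its personal best position, $g$ the global best position, and $r_1, r_2$ are uniform random numbers in $[0,1]$. The fitness being minimized is the $d$-dimensional Ackley function,

$$f(x) = -20\exp\!\Bigl(-0.2\sqrt{\tfrac{1}{d}\textstyle\sum_{k=1}^{d} x_k^2}\Bigr) - \exp\!\Bigl(\tfrac{1}{d}\textstyle\sum_{k=1}^{d}\cos(2\pi x_k)\Bigr) + 20 + e,$$

which the code approximates by using the constant 2.72 in place of $e$.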
- Generating the initial particles:
```java
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;

// Generates the initial particle swarm and writes it to a text file
// that serves as the input of the first MapReduce iteration.
public class psotest {

    private final int iRang = 30;      // search range: positions are drawn from [-30, 30]
    static int dim = 2;                // problem dimension
    static int sizepop = 20;           // swarm size
    double sum1 = 0;
    double sum2 = 0;
    double g = 10000;                  // global best fitness (minimised)
    private Random random = new Random();
    public double[][] pop = new double[sizepop][dim];     // positions
    public double[][] dpbest = new double[sizepop][dim];  // personal best positions
    public double[][] V = new double[sizepop][dim];       // velocities
    public double[] fitness = new double[sizepop];        // current fitness values
    public double[] gbest = new double[dim];              // global best position
    public double[] fitnessgbest = new double[sizepop];
    public double[] bestfitness = new double[sizepop];    // personal best fitness values
    int m_iTempPos = 999;              // index of the best particle (999 = not found)

    // Initialise the swarm and evaluate the Ackley function for each particle.
    public void c() {
        for (int i = 0; i < sizepop; i++) {
            for (int j = 0; j < dim; j++) {
                pop[i][j] = (random.nextDouble() - 0.5) * 2 * iRang;
                V[i][j] = dpbest[i][j] = pop[i][j];
            }
            // Reset the accumulators for each particle (they are instance fields).
            sum1 = 0;
            sum2 = 0;
            for (int j = 0; j < dim; j++) {
                sum1 += Math.pow(pop[i][j], 2);
                sum2 += Math.cos(2 * Math.PI * pop[i][j]);
            }
            // Ackley function; 2.72 approximates e.
            fitness[i] = -20 * Math.exp(-0.2 * Math.sqrt((1.0 / dim) * sum1))
                    - Math.exp((1.0 / dim) * sum2) + 20 + 2.72;
            bestfitness[i] = 10000;
            if (bestfitness[i] > fitness[i]) {
                bestfitness[i] = fitness[i];
                for (int j = 0; j < dim; j++) {
                    dpbest[i][j] = pop[i][j];
                }
            }
        }
        // Pick the particle with the smallest personal best fitness as the global best.
        for (int i = 0; i < sizepop; i++) {
            if (bestfitness[i] < g) {
                g = bestfitness[i];
                m_iTempPos = i;
            }
        }
        if (m_iTempPos != 999) {
            for (int j = 0; j < dim; j++) {
                gbest[j] = dpbest[m_iTempPos][j];
            }
        }
    }

    public static void main(String[] args) throws IOException {
        psotest pso = new psotest();
        pso.c();
        File file = new File("E:\\JavaProgram\\psotest\\input\\file.txt");
        BufferedWriter out = new BufferedWriter(new FileWriter(file, true));
        // One line per particle: index, then (pop, V, dpbest, gbest) per dimension,
        // then the personal best fitness and the global best fitness.
        for (int i = 0; i < sizepop; i++) {
            out.write(i + " ");
            for (int j = 0; j < dim; j++) {
                out.write(pso.pop[i][j] + " " + pso.V[i][j] + " "
                        + pso.dpbest[i][j] + " " + pso.gbest[j] + " ");
            }
            out.write(pso.bestfitness[i] + " " + pso.g);
            out.newLine();
        }
        out.close();
    }
}
```
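Each line written by psotest has the layout "index, then pop / V / dpbest / gbest repeated for every dimension, then bestfitness and g". This is exactly the whitespace-separated record that the Mapper in the next listing splits and parses back into a particle's state, so the file can be used unchanged as the input of the first MapReduce iteration.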
- The MapReduce version of the PSO algorithm:
```java
package test;

import java.io.IOException;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class posmr {

    // Map step: parse a particle's state, apply the PSO velocity/position update,
    // re-evaluate the Ackley fitness and update the personal best.
    // (Despite its name, IntSumReducer is the Mapper of this job.)
    public static class IntSumReducer extends Mapper<Object, Text, DoubleWritable, Text> {
        private DoubleWritable job1map_key = new DoubleWritable();
        private Text job1map_value = new Text();
        static int dim = 2;
        static int sizepop = 20;
        private final double w = 1;   // inertia weight
        static double c1 = 1;         // cognitive acceleration coefficient
        static double c2 = 1;         // social acceleration coefficient
        public double[] pop = new double[dim];     // position
        public double[] dpbest = new double[dim];  // personal best position
        public double[] V = new double[dim];       // velocity
        public double[] fitness = new double[sizepop];
        public double[] gbest = new double[dim];   // global best position
        public String[] S_value = new String[dim];
        public String F_value;
        public double bestfitness;
        public double m_dFitness;
        private Random random = new Random();
        double g;
        double sum1;
        double sum2;

        public void map(Object key, Text values, Context context)
                throws IOException, InterruptedException {
            // Record layout: index/key, then (pop, V, dpbest, gbest) per dimension,
            // then the personal best fitness and the global best fitness.
            String itr[] = values.toString().split("\\s");
            int k = 1;
            F_value = "";
            for (int j = 0; j < dim; j++) {
                pop[j] = Double.valueOf(itr[k++]);
                V[j] = Double.valueOf(itr[k++]);
                dpbest[j] = Double.valueOf(itr[k++]);
                gbest[j] = Double.valueOf(itr[k++]);
            }
            bestfitness = Double.valueOf(itr[k++]);
            g = Double.valueOf(itr[k++]);
            // Standard PSO velocity and position update.
            for (int i = 0; i < dim; i++) {
                V[i] = w * V[i] + c1 * random.nextDouble() * (dpbest[i] - pop[i])
                        + c2 * random.nextDouble() * (gbest[i] - pop[i]);
                pop[i] = pop[i] + V[i];
            }
            sum1 = 0;
            sum2 = 0;
            // Evaluate the Ackley function at the new position.
            for (int i = 0; i < dim; i++) {
                sum1 += pop[i] * pop[i];
                sum2 += Math.cos(2 * Math.PI * pop[i]);
            }
            // m_dFitness is the fitness of the current position (2.72 approximates e).
            m_dFitness = -20 * Math.exp(-0.2 * Math.sqrt((1.0 / dim) * sum1))
                    - Math.exp((1.0 / dim) * sum2) + 20 + 2.72;
            if (m_dFitness < 0) {
                System.out.println(sum1 + " " + m_dFitness + " " + sum2);
            }
            // Update the personal best.
            if (m_dFitness < bestfitness) {
                bestfitness = m_dFitness;
                for (int i = 0; i < dim; i++) {
                    dpbest[i] = pop[i];
                }
            }
            // Emit (personal best fitness, "pop V dpbest" per dimension); Hadoop's
            // ascending sort on the key lets the reducer see the best particle first.
            for (int j = 0; j < dim; j++) {
                S_value[j] = Double.toString(pop[j]) + " " + Double.toString(V[j]) + " "
                        + Double.toString(dpbest[j]) + " ";
            }
            for (int j = 0; j < dim; j++) {
                F_value += S_value[j];
            }
            job1map_key.set(bestfitness);
            job1map_value.set(F_value);
            context.write(job1map_key, job1map_value);
        }
    }

    // Reduce step: determine the new global best and write every particle back
    // out with the updated global best position appended to its state.
    public static class job1Reducer extends Reducer<DoubleWritable, Text, DoubleWritable, Text> {
        private DoubleWritable job1reduce_key = new DoubleWritable();
        private Text job1reduce_value = new Text();
        static int dim = 2;
        static int sizepop = 20;
        public double[] pop = new double[dim];
        public double[] dpbest = new double[dim];
        public double[] V = new double[dim];
        public double[] gbest = new double[dim];
        public double[] gbest_temp = new double[dim];
        public String[] S_value = new String[dim];
        public String F_value;
        // m_dFitness and gbest_temp persist across reduce() calls within one reducer;
        // keys arrive in ascending order, so the first group holds the new global best.
        public double m_dFitness = Double.MAX_VALUE;

        public void reduce(DoubleWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            Double bestfitness = Double.valueOf(key.toString());
            int k;
            if (bestfitness < m_dFitness) {
                // This group contains the best particle: its personal best becomes gbest.
                m_dFitness = bestfitness;
                for (Text val : values) {
                    String itr[] = val.toString().split(" ");
                    k = 0;
                    for (int j = 0; j < dim; j++) {
                        pop[j] = Double.valueOf(itr[k++]);
                        V[j] = Double.valueOf(itr[k++]);
                        dpbest[j] = Double.valueOf(itr[k++]);
                    }
                    for (int j = 0; j < dim; j++) {
                        gbest[j] = dpbest[j];
                        gbest_temp[j] = dpbest[j];
                    }
                    F_value = "";
                    for (int j = 0; j < dim; j++) {
                        S_value[j] = Double.toString(pop[j]) + " " + Double.toString(V[j]) + " "
                                + Double.toString(dpbest[j]) + " " + Double.toString(gbest[j]) + " ";
                    }
                    for (int j = 0; j < dim; j++) {
                        F_value += S_value[j];
                    }
                    F_value += Double.toString(bestfitness) + " " + Double.toString(m_dFitness);
                    // The output key is a constant, so the next map iteration can skip it
                    // the same way it skips the particle index in the initial file.
                    job1reduce_key.set(1);
                    job1reduce_value.set(F_value);
                    context.write(job1reduce_key, job1reduce_value);
                }
            } else {
                // Any later group: re-emit the particle with the stored global best.
                for (Text val : values) {
                    String itr[] = val.toString().split(" ");
                    k = 0;
                    for (int j = 0; j < dim; j++) {
                        pop[j] = Double.valueOf(itr[k++]);
                        V[j] = Double.valueOf(itr[k++]);
                        dpbest[j] = Double.valueOf(itr[k++]);
                    }
                    for (int j = 0; j < dim; j++) {
                        gbest[j] = gbest_temp[j];
                    }
                    F_value = "";
                    for (int j = 0; j < dim; j++) {
                        S_value[j] = Double.toString(pop[j]) + " " + Double.toString(V[j]) + " "
                                + Double.toString(dpbest[j]) + " " + Double.toString(gbest[j]) + " ";
                    }
                    for (int j = 0; j < dim; j++) {
                        F_value += S_value[j];
                    }
                    F_value += Double.toString(bestfitness) + " " + Double.toString(m_dFitness);
                    job1reduce_key.set(1);
                    job1reduce_value.set(F_value);
                    context.write(job1reduce_key, job1reduce_value);
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: posmr <in> <out>");
            System.exit(2);
        }
        String input = otherArgs[0];
        String output = otherArgs[1];
        FileSystem fs;
        try {
            fs = FileSystem.get(conf);
            int step = 20;  // number of PSO iterations, i.e. chained MapReduce jobs
            for (int i = 0; i < step; i++) {
                System.out.println("Iteration " + i);
                Job job = new Job(conf, "pso");
                job.setJarByClass(posmr.class);
                job.setMapperClass(IntSumReducer.class);
                job.setReducerClass(job1Reducer.class);
                job.setMapOutputKeyClass(DoubleWritable.class);
                job.setMapOutputValueClass(Text.class);
                job.setOutputKeyClass(DoubleWritable.class);
                job.setOutputValueClass(Text.class);
                FileInputFormat.addInputPath(job, new Path(input));
                FileOutputFormat.setOutputPath(job, new Path(output));
                job.waitForCompletion(true);
                // The output of this iteration becomes the input of the next one.
                input = output;
                output += i;
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }
}
```
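To run the chain end to end, the file produced by psotest would first be copied into the job's input directory (for example with `hadoop fs -put`) and the driver submitted with `hadoop jar`, passing the input and output paths as its two arguments; the exact paths and jar name are placeholders rather than values from the original post. Each of the 20 chained jobs then performs one PSO iteration: the Map phase updates every particle's velocity, position and personal best, and the Reduce phase determines the new global best and writes the whole swarm back out with the updated global best position, which becomes the next job's input.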