MapReduce Example: Estimating the Value of Pi

Problem:

The idea behind the program is this. Take a square with side length 1. With one corner of the square as the center and 1 as the radius, draw an arc; this encloses a quarter-circle sector inside the square. Scatter points at random inside the square: some land inside the sector, some outside. The square has area 1 and the sector has area 0.25*Pi. If n points are generated in total and nc of them fall inside the sector, then with enough points the ratio nc/n approaches the ratio of the two areas, i.e. nc/n ≈ 0.25*Pi/1, so Pi ≈ 4*nc/n.
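Before bringing in Hadoop, the estimate itself can be sanity-checked with a minimal single-process Java sketch (the class name MonteCarloPi and the use of java.util.Random are illustrative, not part of the original program):

import java.util.Random;

/** Single-process check of the estimate: Pi ≈ 4 * nc / n. */
public class MonteCarloPi {
    public static void main(String[] args) {
        int n = 1000000;           // total number of points thrown
        int nc = 0;                // points landing inside the quarter circle
        Random rnd = new Random();
        for (int i = 0; i < n; i++) {
            double x = rnd.nextDouble();   // uniform in [0, 1)
            double y = rnd.nextDouble();
            if (x * x + y * y <= 1.0) {    // inside the sector of radius 1
                nc++;
            }
        }
        System.out.println("Pi is approximately " + 4.0 * nc / n);
    }
}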

Implementation outline:

The map phase reads the input file, each of whose lines holds a throw count; for now each line is set to 100 throws, with 10 lines in total.

Inside map, coordinates (x, y) are generated and the distance from each point to (0, 0) is computed. If the distance is at most 1, the in counter is incremented; otherwise the out counter is. A pi estimate is then computed from the counts.

In reduce, the pi estimates from all the map outputs are averaged to give the final value.
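For this setup the input file is simply ten lines, each containing the single number 100. A sketch of preparing it, assuming the HDFS paths used by the driver code below:

# pi.txt: one trial of 100 throws per line, ten lines in total
100
100
100
100
100
100
100
100
100
100

hadoop fs -put pi.txt hdfs://192.168.6.132:9000/wys/in/pi.txt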

The code is as follows (two files in the Demo3 package):

package Demo3;

/**
 * @author 星际毁灭
 * Generates the (x, y) coordinates of the sample points.
 * */
public class Pi {
    static int digit = 40;                                  // fractional digits kept per base
    private int[] bases = new int[2];                       // one base per coordinate
    private double[] baseDigit = new double[2];             // 1/base for each base
    private double[][] background = new double[2][digit];   // precomputed powers base^-(j+1)
    private long index;                                     // current position in the sequence

    Pi(int[] base) {
        bases = base.clone();
        index = 0;

        // Precompute base^-(j+1) for each base so getNext() only needs multiplications.
        for (int i = 0; i < bases.length; i++) {
            double b = 1.0 / bases[i];
            baseDigit[i] = b;
            for (int j = 0; j < digit; j++) {
                background[i][j] = j == 0 ? b : background[i][j - 1] * b;
            }
        }
    }
    
    double[] getNext() {
        index++;

        double[] result = {0, 0};

        // For each base, mirror the digits of index around the radix point:
        // digit j of index contributes digit * base^-(j+1), giving a value in [0, 1).
        for (int i = 0; i < bases.length; i++) {
            long num = index;
            int j = 0;
            while (num != 0) {
                result[i] += num % bases[i] * background[i][j++];
                num /= bases[i];
            }
        }

        return result;
    }
    
    public static void main(String[] args) {
        int[] base = {2,5};
        Pi test = new Pi(base);
        // Print the first 100 generated points, tab-separated.
        for (int x = 0; x < 100; x++) {
            double[] t = test.getNext();
            System.out.println(t[0] + "\t" + t[1]);
        }
    }
}
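Despite the "random" wording, getNext() is deterministic: for each base it mirrors the digits of index around the radix point (the radical inverse), which is how Halton-style low-discrepancy sequences are built. For example, with index = 6 in base 2: 6 is 110 in binary, and the loop accumulates 0*(1/2) + 1*(1/4) + 1*(1/8) = 0.375, i.e. 0.011 in binary. Using two different bases, 2 and 5, keeps the x and y coordinates from repeating in lockstep. The MapReduce job itself is a second file in the same package: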
package Demo3;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * @author 星际毁灭
 * Estimates the value of pi with MapReduce.
 * */
public class GetPoint {
    public static class Map extends Mapper<Object, Text, Text, Text> {
        private static Text newKey = new Text();
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            int num = Integer.parseInt(line);  // number of points to generate, read from this input line
            int[] base = {2, 5};               // bases for the x and y coordinates
            Pi test = new Pi(base);            // the coordinate generator defined above
            int in = 0;                        // points inside the quarter circle
            int out = 0;                       // points outside it
            newKey.set("pi");
            System.out.println(num);           // debug output: the throw count for this line
            for (int x = 0; x < num; x++) {
                double[] t = test.getNext();   // next (x, y) pair
                //System.out.println(t[0] + "\t" + t[1]);
                if (t[0] * t[0] + t[1] * t[1] <= 1) {  // distance to the origin is at most 1
                    in++;
                } else {
                    out++;
                }
            }
            double pi = 4.0 * in / num;        // the estimate: Pi ≈ 4 * in / num
            context.write(newKey, new Text(pi + ""));  // emit the estimate under the shared key "pi"
        }
    }
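    // Note: map() builds a fresh Pi on each call and the sequence always starts
    // at index 0, so every input line samples exactly the same points and the
    // reducer averages identical estimates. A hypothetical tweak (assuming
    // TextInputFormat, whose keys are byte offsets; needs
    // org.apache.hadoop.io.LongWritable) would stagger the sequence per line:
    //   long offset = ((LongWritable) key).get();
    //   for (long s = 0; s < offset; s++) test.getNext();  // skip to a fresh stretch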
    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            double sum = 0;
            int num = 0;
            for (Text val : values) {   // average the per-line estimates
                sum += Double.parseDouble(val.toString());
                num++;
            }
            double pi = sum / num;      // final estimate of pi
            context.write(key, new Text(String.valueOf(pi)));  // emit the result
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        System.setProperty("hadoop.home.dir", "H:\\文件\\hadoop\\hadoop-2.6.4");
        Configuration conf = new Configuration();
        Path in = new Path("hdfs://192.168.6.132:9000/wys/in/pi.txt");
        Path out = new Path("hdfs://192.168.6.132:9000/wys/out/piout");
        Job job = Job.getInstance(conf, "GetPoint");
        FileInputFormat.addInputPath(job, in);
        FileOutputFormat.setOutputPath(job, out);

        job.setJarByClass(GetPoint.class);
        job.setMapperClass(GetPoint.Map.class);
        job.setReducerClass(GetPoint.Reduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Submit the job, wait for it to finish, and exit with 0 on success.
        // waitForCompletion may only be called once per Job instance.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
    
}
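To run the job, package the two classes into a jar and submit it; a sketch, with the jar name pi-demo.jar as a placeholder:

hadoop jar pi-demo.jar Demo3.GetPoint
hadoop fs -cat hdfs://192.168.6.132:9000/wys/out/piout/part-r-00000

The output file holds a single line keyed by "pi" whose value is the averaged estimate.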
Original post: https://www.cnblogs.com/wys-373/p/11455557.html