
MapReduce Basic Example (1): WordCount, word frequency counting

Input text file words.txt:

hello tom
hello lina
hello tom
hello GPY
HI selina


Expected result:

GPY	1
HI	1
hello	4
lina	1
selina	1
tom	2

Java code:

package MR;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WC{
//Define the Mapper class; the four type parameters are (the offset type map reads in, the input value type, the key and value types map will output)
    public static class MyMap extends Mapper<LongWritable, Text, Text, IntWritable>{
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] fields = value.toString().split(" ");//split the input line on spaces
            for(String s: fields){
                context.write(new Text(s), new IntWritable(1));//emit each word as the key with value 1, the base unit of the word count
            }
        }
    }

//Define the Reducer class; the four type parameters are (the map output key type, the map output value type, the key and value types the reducer will output)
    public static class MyReduce extends Reducer<Text, IntWritable, Text, IntWritable>{
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values,//after the shuffle, all values for the same key arrive grouped into one Iterable
                              Context context) throws IOException, InterruptedException {
            int count=0;
            for(IntWritable i:values){
                count+=i.get();//sum the 1s for this word in the reducer
            }
            context.write(key, new IntWritable(count));//emit the word and its total count
        }
    }
    public static void main(String[] args) {
        Configuration conf=new Configuration();
        try {
            Job job=Job.getInstance(conf);
            job.setJarByClass(WC.class);//set the driver class used to locate the jar
            job.setMapperClass(MyMap.class);//set the mapper class
            job.setReducerClass(MyReduce.class);//set the reducer class

            job.setOutputKeyClass(Text.class);//output key type
            job.setOutputValueClass(IntWritable.class);//output value type

            Path inPath =new Path("/input/words.txt");
            FileInputFormat.addInputPath(job, inPath);

            Path outpath=new Path("/output/WordCount/result");
            if(outpath.getFileSystem(conf).exists(outpath)){
                outpath.getFileSystem(conf).delete(outpath, true);//delete the output directory if it already exists
            }
            FileOutputFormat.setOutputPath(job, outpath);

            job.waitForCompletion(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
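
To run the job, the class can be packaged into a jar and submitted with the standard Hadoop commands, assuming words.txt has already been uploaded to HDFS at /input/words.txt (the jar name here is only an example):

hadoop jar wc.jar MR.WC
hdfs dfs -cat /output/WordCount/result/part-r-00000

Because the driver deletes the output directory when it already exists, repeated runs do not fail on an existing output path; each reducer writes its results to a part-r-* file with the keys in sorted order, matching the expected result above.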

