天天看點

每日總結

Mapreduce執行個體——ChainMapReduce

依賴:

<dependency>

      <groupId>org.apache.hadoop</groupId>

      <artifactId>hadoop-common</artifactId>

      <version>3.2.0</version>

    </dependency>

    <dependency>

      <groupId>org.apache.hadoop</groupId>

      <artifactId>hadoop-mapreduce-client-app</artifactId>

      <version>3.2.0</version>

    </dependency>

    <dependency>

      <groupId>org.apache.hadoop</groupId>

      <artifactId>hadoop-hdfs</artifactId>

      <version>3.2.0</version>

    </dependency>

    <dependency>

      <groupId>org.slf4j</groupId>

      <artifactId>slf4j-log4j12</artifactId>

      <version>1.7.30</version>

    </dependency>

    <dependency>

      <groupId>org.apache.hadoop</groupId>

      <artifactId>hadoop-client</artifactId>

      <version>3.2.0</version>

    </dependency>

實驗代碼:

package mapreduce;

import java.io.IOException;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;

import org.apache.hadoop.mapreduce.Mapper;

import org.apache.hadoop.mapreduce.Reducer;

import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;

import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;

import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.io.DoubleWritable;

public class ChainMapReduce {

    private static final String INPUTPATH = "hdfs://hadoop102:8020/mymapreduce2/in/goods_0";

    private static final String OUTPUTPATH = "hdfs://hadoop102:8020/mymapreduce2/out8";

    public static void main(String[] args) {

        try {

            Configuration conf = new Configuration();

            FileSystem fileSystem = FileSystem.get(new URI(OUTPUTPATH), conf);

            if (fileSystem.exists(new Path(OUTPUTPATH))) {

                fileSystem.delete(new Path(OUTPUTPATH), true);

            }

            Job job = new Job(conf, ChainMapReduce.class.getSimpleName());

            FileInputFormat.addInputPath(job, new Path(INPUTPATH));

            job.setInputFormatClass(TextInputFormat.class);

            ChainMapper.addMapper(job, FilterMapper1.class, LongWritable.class, Text.class, Text.class, DoubleWritable.class, conf);

            ChainMapper.addMapper(job, FilterMapper2.class, Text.class, DoubleWritable.class, Text.class, DoubleWritable.class, conf);

            ChainReducer.setReducer(job, SumReducer.class, Text.class, DoubleWritable.class, Text.class, DoubleWritable.class, conf);

            ChainReducer.addMapper(job, FilterMapper3.class, Text.class, DoubleWritable.class, Text.class, DoubleWritable.class, conf);

            job.setMapOutputKeyClass(Text.class);

            job.setMapOutputValueClass(DoubleWritable.class);

            job.setPartitionerClass(HashPartitioner.class);

            job.setNumReduceTasks(1);

            job.setOutputKeyClass(Text.class);

            job.setOutputValueClass(DoubleWritable.class);

            FileOutputFormat.setOutputPath(job, new Path(OUTPUTPATH));

            job.setOutputFormatClass(TextOutputFormat.class);

            System.exit(job.waitForCompletion(true) ? 0 : 1);

        } catch (Exception e) {

            e.printStackTrace();

        }

    }

    public static class FilterMapper1 extends Mapper<LongWritable, Text, Text, DoubleWritable> {

        private Text outKey = new Text();

        private DoubleWritable outValue = new DoubleWritable();

        @Override

        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, DoubleWritable>.Context context)

                throws IOException, InterruptedException {

            String line = value.toString();

            if (line.length() > 0) {

                String[] splits = line.split("\t");

                double visit = Double.parseDouble(splits[1].trim());

                if (visit <= 600) {

                    outKey.set(splits[0]);

                    outValue.set(visit);

                    context.write(outKey, outValue);

                }

    public static class FilterMapper2 extends Mapper<Text, DoubleWritable, Text, DoubleWritable> {

        protected void map(Text key, DoubleWritable value, Mapper<Text, DoubleWritable, Text, DoubleWritable>.Context context)

            if (value.get() < 100) {

                context.write(key, value);

    public static class SumReducer extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {

        protected void reduce(Text key, Iterable<DoubleWritable> values, Reducer<Text, DoubleWritable, Text, DoubleWritable>.Context context)

            double sum = 0;

            for (DoubleWritable val : values) {

                sum += val.get();

            outValue.set(sum);

            context.write(key, outValue);

    public static class FilterMapper3 extends Mapper<Text, DoubleWritable, Text, DoubleWritable> {

            if (key.toString().length() < 3) {

                System.out.println("寫出去的内容為:" + key.toString() + "++++" + value.toString());

}

每日總結
每日總結