// Standard MapReduce template code (标准模板代码)
package com.lizh.hadoop.mapreduce;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import com.lizh.hadoop.mapreduce.WordCountMapReduce.WordCountMapper;
import com.lizh.hadoop.mapreduce.WordCountMapReduce.WordCountReduces;
public class MouldMapReduce extends Configured implements Tool{
public class MouldMap extends Mapper<LongWritable, Text, Text, IntWritable>{
@Override
protected void setup(Context context) throws IOException,
InterruptedException {
// TODO 读取数据前的一些初始化工作或者读取文件前的一些初始化工作
super.setup(context);
}
@Override
protected void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
// TODO
super.map(key, value, context);
}
@Override
protected void cleanup(Context context) throws IOException,
InterruptedException {
// TODO Auto-generated method stub
super.cleanup(context);
}
}
public class MouldReduce extends Reducer<Text, IntWritable, Text,IntWritable>{
@Override
protected void setup(Context context)
throws IOException, InterruptedException {
// TODO 读取数据前的一些初始化工作或者读取文件前的一些初始化工作
super.setup(context);
}
@Override
protected void reduce(Text arg0, Iterable<IntWritable> arg1,Context arg2)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
super.reduce(arg0, arg1, arg2);
}
@Override
protected void cleanup(
org.apache.hadoop.mapreduce.Reducer.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
super.cleanup(context);
}
}
private Job getJob(String[] args){
Configuration configuration = this.getConf();
Job job = null;
try {
job = Job.getInstance(configuration, this.getClass().getSimpleName());
job.setJarByClass(this.getClass());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return job;
}
public int run(String[] args) throws Exception {
// TODO Auto-generated method stub
//input-->map--reduce--output
//getjob
Job job = getJob(args);
//setjob
Path path = new Path(args[0]);
FileInputFormat.addInputPath(job, path);
// map
job.setMapperClass(WordCountMapper.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
// reduce
job.setReducerClass(WordCountReduces.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
//output
Path outputpath = new Path(args[1]);
FileOutputFormat.setOutputPath(job, outputpath);
// submit job
boolean rv = job.waitForCompletion(true);//true的时候打印日志
return rv ? 0:1;
}
public static void main(String[] args) throws Exception{
Configuration conf = new Configuration();
ToolRunner.run(conf, new MouldMapReduce(), args);
}
}