package com.laoxiao.mr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class RunTest {

    public static void main(String[] args) {
        Configuration config = new Configuration();
        config.set("fs.defaultFS", "hdfs://node9:9000");
        config.set("yarn.resourcemanager.hostname", "node9");
        try {
            FileSystem fs = FileSystem.get(config);

            Job job = Job.getInstance(config);
            // Ship this class's jar with the job so the mapper/reducer
            // classes can be located on the cluster.
            job.setJarByClass(RunTest.class);
            job.setMapperClass(FriendsMapper.class);
            job.setReducerClass(FriendReduer.class);
            job.setMapOutputKeyClass(Friend.class);
            job.setMapOutputValueClass(IntWritable.class);

            // Set the input format for the map task: each line is split into a
            // key/value pair on the first separator (tab by default).
            job.setInputFormatClass(KeyValueTextInputFormat.class);
            FileInputFormat.addInputPath(job, new Path("/input/data"));

            // Delete the output directory if it already exists, otherwise the job fails.
            Path outpath = new Path("/output/friends");
            if (fs.exists(outpath)) {
                fs.delete(outpath, true);
            }
            FileOutputFormat.setOutputPath(job, outpath);

            boolean f = job.waitForCompletion(true);
            if (f) {
                System.out.println("mr executed successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
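
/*
 * The driver above references FriendsMapper, FriendReduer and Friend, which live
 * elsewhere in com.laoxiao.mr and are not shown here. As a rough sketch of the
 * shapes the driver implies (assumptions, not the actual implementations): with
 * KeyValueTextInputFormat the mapper consumes Text/Text pairs, and the configured
 * map output types are Friend/IntWritable. The reducer's output types below are
 * only an assumption.
 *
 *   public class FriendsMapper extends Mapper<Text, Text, Friend, IntWritable> {
 *       @Override
 *       protected void map(Text key, Text value, Context context)
 *               throws IOException, InterruptedException {
 *           // parse the key/value of each input line and emit (Friend, count) pairs
 *       }
 *   }
 *
 *   public class FriendReduer extends Reducer<Friend, IntWritable, Text, IntWritable> {
 *       @Override
 *       protected void reduce(Friend key, Iterable<IntWritable> values, Context context)
 *               throws IOException, InterruptedException {
 *           // aggregate the counts collected for each Friend key
 *       }
 *   }
 */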