This is a txt file containing some data:
27 41 39 29 51 45 24 28 56 52 29 51 18 25 19 10 52 37 18 25 23 52 19 33 59 24 39 58 51 12
44 10 42 19 35 28 39 33 58 45 28 35 26 55 32 33 27 40 10 31 42 15 41 56 42 47 40 45 28 52
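If you want to follow along locally, the two lines above just need to be saved as a plain text file at the input path used by the driver below (D:\wordcount\input\SumData.txt). A minimal sketch for doing that in plain Java; the class name PrepareSumData is only for illustration:
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
// Illustrative helper, not part of the tutorial's MapReduce code.
public class PrepareSumData {
    public static void main(String[] args) throws IOException {
        // Write the two sample lines to the input path used by WordCountDriver.
        Path input = Paths.get("D:\\wordcount\\input\\SumData.txt");
        Files.createDirectories(input.getParent());
        Files.write(input, Arrays.asList(
                "27 41 39 29 51 45 24 28 56 52 29 51 18 25 19 10 52 37 18 25 23 52 19 33 59 24 39 58 51 12",
                "44 10 42 19 35 28 39 33 58 45 28 35 26 55 32 33 27 40 10 31 42 15 41 56 42 47 40 45 28 52"));
    }
}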
Step 1: WordCountMap
package com.czxy;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class WordCountMap extends Mapper<LongWritable, Text, Text, LongWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // One input record is one line of space-separated numbers.
        String line = value.toString();
        String[] tokens = line.split(" ");
        for (String token : tokens) {
            // Skip empty tokens caused by extra spaces.
            if ("".equals(token)) {
                continue;
            }
            // Key: the whole line; value: one number from that line.
            context.write(new Text(line), new LongWritable(Long.parseLong(token)));
        }
    }
}
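Note what the mapper emits: the key is the entire input line and the value is each individual number on that line, so the reducer will later see all numbers belonging to the same line grouped together. A small standalone sketch (plain Java, not part of the job; the class name MapLogicDemo is only for illustration) that mimics this emission for the first data line:
// Illustrative demo of the map-side logic, independent of Hadoop.
public class MapLogicDemo {
    public static void main(String[] args) {
        String line = "27 41 39 29 51 45 24 28 56 52 29 51 18 25 19 10 52 37 18 25 23 52 19 33 59 24 39 58 51 12";
        // Same splitting/filtering as WordCountMap: skip empty tokens, emit (line, number).
        for (String token : line.split(" ")) {
            if ("".equals(token)) {
                continue;
            }
            System.out.println("key = <the whole line>, value = " + Long.parseLong(token));
        }
    }
}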
Step 2: WordCountReduce
package com.czxy;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class WordCountReduce extends Reducer<Text, LongWritable, Text, LongWritable> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
        // Sum all numbers that were emitted for this line.
        long sum = 0;
        for (LongWritable value : values) {
            sum = sum + value.get();
        }
        // Output: the original line followed by its sum.
        context.write(key, new LongWritable(sum));
    }
}
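The reducer simply adds up all values received under one key, i.e. all numbers of one line. The same summation can be checked in plain Java; for the first data line it should print 1046, matching the output shown at the end. A quick sketch, independent of Hadoop (the class name ReduceLogicDemo is only for illustration):
// Illustrative demo of the reduce-side summation, independent of Hadoop.
public class ReduceLogicDemo {
    public static void main(String[] args) {
        String line = "27 41 39 29 51 45 24 28 56 52 29 51 18 25 19 10 52 37 18 25 23 52 19 33 59 24 39 58 51 12";
        long sum = 0;
        // Same accumulation as WordCountReduce.reduce().
        for (String token : line.split(" ")) {
            sum += Long.parseLong(token);
        }
        System.out.println(line + "\t" + sum); // expected sum: 1046
    }
}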
Step 3: WordCountDriver
package com.czxy;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class WordCountDriver extends Configured implements Tool {
    @Override
    public int run(String[] args) throws Exception {
        // Wire the Map and Reduce classes written above into the MapReduce framework.
        // 1. Create a job instance
        Job job = Job.getInstance(new Configuration(), "WordCount");
        // 2. Tell the job how and where to read the input data
        job.setInputFormatClass(TextInputFormat.class);
        TextInputFormat.addInputPath(job, new Path("D:\\wordcount\\input\\SumData.txt"));
        // 3. Set the Mapper class (and the types the map side outputs)
        job.setMapperClass(WordCountMap.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        // 4. Set the Reducer class (and the types the reduce side outputs)
        job.setReducerClass(WordCountReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // 5. Set the output format and output path (the path must not exist yet;
        //    Hadoop creates it as a directory and writes part-r-00000 inside it)
        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job, new Path("D:\\wordcount\\output\\a.txt"));
        // 6. Submit the job and return its exit status
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // Pass the job's exit status back to the JVM.
        int status = ToolRunner.run(new WordCountDriver(), args);
        System.exit(status);
    }
}
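Once the job has finished, the results can be printed from the part-r-00000 file that Hadoop writes inside the output directory. A hedged sketch, assuming the job ran with the default single reducer and the paths above (the class name PrintJobOutput is only for illustration):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
// Illustrative helper for viewing the job output, independent of Hadoop.
public class PrintJobOutput {
    public static void main(String[] args) throws IOException {
        // Default reducer output file inside the directory passed to TextOutputFormat.setOutputPath().
        Files.readAllLines(Paths.get("D:\\wordcount\\output\\a.txt\\part-r-00000"))
             .forEach(System.out::println);
    }
}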
Finally, check the output under D:\wordcount\output\a.txt (Hadoop creates it as a directory; the results are in the part-r-00000 file inside it):
27 41 39 29 51 45 24 28 56 52 29 51 18 25 19 10 52 37 18 25 23 52 19 33 59 24 39 58 51 12 1046 (sum)
44 10 42 19 35 28 39 33 58 45 28 35 26 55 32 33 27 40 10 31 42 15 41 56 42 47 40 45 28 52 1078 (sum)
More content is on the way. Give this a follow!