目录
8、上传【jar】包以及【info.txt】文件至【/opt/soft/hadoop/share/hadoop/mapreduce】 下
1、分布式/伪分布式的hadoop环境【hadoop2.7.3】
2、win10本地Java环境【jdk8】
3、win10本地hadoop环境【2.7.3】
4、win10本地Maven环境【Maven3】
5、idea开发工具【IntelliJ IDEA 2020.1.3 x64】
需要建立【maven】项目,建立过程中
项目名称无所谓:
点击【file】->【Settings】后如下图操作,选择本地的Maven配置文件以及仓库地址。
引入【hadoop-client】
<dependencies>
<!-- Hadoop client library; the version must match the cluster (2.7.3 here). -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.7.3</version>
</dependency>
</dependencies>
在【src的java】下创建【com.item.test】包,以及【MapAction】【ReduceAction】【Action】的类文件。
【MapAction】编码
package com.item.test;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
/**
 * Mapper: for every character of an input line, emits the pair
 * (character, 1) so the reducer can total per-character occurrences.
 *
 * <p>Input key is the byte offset of the line (unused); input value is the
 * line text. Output key is a single-character Text; output value is the
 * constant count 1.
 */
public class MapAction extends Mapper<LongWritable, Text, Text, LongWritable> {

    // Reusable output objects: Context.write() serializes the values
    // immediately, so a single instance can be reset per record instead of
    // allocating two objects for every character of the input.
    private final Text outKey = new Text();
    private static final LongWritable ONE = new LongWritable(1);

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        for (char c : value.toString().toCharArray()) {
            outKey.set(String.valueOf(c));
            context.write(outKey, ONE);
        }
    }
}
【ReduceAction】编码
package com.item.test;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
/**
 * Reducer: receives every count emitted for one character and writes the
 * pair (character, total occurrences).
 */
public class ReduceAction extends Reducer<Text, LongWritable, Text, LongWritable> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
        // Accumulate the partial counts for this key.
        long total = 0L;
        java.util.Iterator<LongWritable> it = values.iterator();
        while (it.hasNext()) {
            total += it.next().get();
        }
        context.write(key, new LongWritable(total));
    }
}
【Action】编码
package com.item.test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
/**
 * Driver for the per-character word-count job (local-filesystem test run).
 *
 * <p>Usage: {@code Action [inputPath [outputPath]]}. When no arguments are
 * supplied, the original hard-coded Windows paths are used, so existing
 * invocations keep working unchanged.
 */
public class Action {
    // Defaults preserved from the original hard-coded local test paths.
    private static final String DEFAULT_INPUT = "D:\\info.txt";
    private static final String DEFAULT_OUTPUT = "D:\\infos";

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(Action.class);
        job.setMapperClass(MapAction.class);
        job.setReducerClass(ReduceAction.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // Paths may be overridden from the command line; the output directory
        // must NOT already exist or Hadoop aborts the job.
        String input = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String output = args.length > 1 ? args[1] : DEFAULT_OUTPUT;
        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));
        boolean ok = job.waitForCompletion(true);
        System.exit(ok ? 0 : 1);
    }
}
本地测试文件【D:\\info.txt】:
Accept that this is your starting point.Instead of placing judgements on it,see the real,positive value that’s already yours.You cannot change where your past priorities and choices have brought you.Yet you can make use of the wisdom you’ve earned to create a future that fulfils your most treasured dreams.
相信此刻就是你的起点。无需再判断,看到你所具备的真正意义上的价值。你过去认为重要的事和以前做过的选择给你带来了什么都是无法改变的。然而你可以充分利用你的智慧去创造未来,实现你最珍爱的梦想。
测试成功:
package com.item.test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
/**
 * Driver for the per-character word-count job (HDFS / cluster run).
 *
 * <p>Usage: {@code hadoop jar DEMO1.jar com.item.test.Action [input [output]]}.
 * When no arguments are supplied, the original hard-coded HDFS paths are
 * used, so existing invocations keep working unchanged.
 */
public class Action {
    // Defaults preserved from the original hard-coded HDFS paths.
    private static final String DEFAULT_INPUT = "/info/info.txt";
    private static final String DEFAULT_OUTPUT = "/infos";

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(Action.class);
        job.setMapperClass(MapAction.class);
        job.setReducerClass(ReduceAction.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // Cluster paths (not local): may be overridden from the command line,
        // as in "hadoop jar DEMO1.jar ... /info/info.txt /infos". The output
        // directory must NOT already exist or Hadoop aborts the job.
        String input = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String output = args.length > 1 ? args[1] : DEFAULT_OUTPUT;
        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));
        boolean ok = job.waitForCompletion(true);
        System.exit(ok ? 0 : 1);
    }
}
依次点击【file】【Project Structure】
点击【Artifacts】【JAR】【From modules with dependencies...】
点击【OK】退出即可
点击【Build】【Build Artifacts...】
在项目层级中可以看到生成的【out】文件夹,打开后找到对应的【jar】包文件。
输入以下命令:
cd /opt/soft/hadoop/share/hadoop/mapreduce
hadoop fs -mkdir /info
hadoop fs -put info.txt /info/info.txt
hadoop fs -cat /info/info.txt
文件位置:
hadoop jar DEMO1.jar com/item/test/Action /info/info.txt /infos
生成文件位置:
11、查看生成结果
hadoop fs -cat /infos/part-r-00000
1、常用的【hdfs】命令一定要熟悉,用的很多。
2、在服务器中运行的时候不能使用【绝对地址D:\\类似的路径】
3、如果存在导出文件夹则会报错,故而生成的文件一定要放置在没有的文件夹内。