作者:平凡无间2010 | 来源:互联网 | 2023-09-15 18:38
MapReduce---计数器的使用需求及数据代码实现自定义Student类编写Map类编写DriTest结果需求及数据数据A,40,70,78B,27,30,84C,73,88,78……
MapReduce---计数器的使用
- 需求及数据
- 代码实现
- 自定义Student类
- 编写Map类
- 编写DriTest
- 结果
需求及数据
A,40,70,78
B,27,30,84
C,73,88,78
D,75,29,44
E,93,85,15
F,21,4,77
G,50,11,62
H,3,64,18
I,75,31,3
J,38,88,23
K,75,79,26
L,87,69,69
M,45,15,43
N,49,63,82
O,57,44,12
P,59,8,38
Q,40,68,3
R,78,18,75
S,79,47,21
T,19,45,79
U,84,5,30
V,67,80,64
W,87,22,11
X,11,51,68
Y,88,25,33
Z,31,60,22
字段分别对应姓名,语文成绩,数学成绩,英语成绩
- 需求
统计各科及格与不及格的人数,按总成绩降序排序
代码实现
自定义Student类
自定义对象来存储数据
import org.apache.hadoop.io.WritableComparable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
public class Student implements WritableComparable {
private String name;
private int chinese;
private int math;
private int english;
private int sum;
public int compareTo(Student o) {
if (o.sum - this.sum == 0) {
return o.chinese - this.chinese;
}
return o.sum - this.sum;
}
public void write(DataOutput dataOutput) throws IOException {
dataOutput.writeUTF(name);
dataOutput.writeInt(chinese);
dataOutput.writeInt(math);
dataOutput.writeInt(english);
dataOutput.writeInt(sum);
}
public void readFields(DataInput dataInput) throws IOException {
name = dataInput.readUTF();
chinese = dataInput.readInt();
math = dataInput.readInt();
english = dataInput.readInt();
sum = dataInput.readInt();
}
public void set(String name, int chinese, int math, int english) {
this.name = name;
this.chinese = chinese;
this.math = math;
this.english = english;
this.sum = chinese+math+english;
}
@Override
public String toString() {
return name+"\t"+chinese+"\t"+math+"\t"+english+"\t"+sum;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getChinese() {
return chinese;
}
public void setChinese(int chinese) {
this.chinese = chinese;
}
public int getMath() {
return math;
}
public void setMath(int math) {
this.math = math;
}
public int getEnglish() {
return english;
}
public void setEnglish(int english) {
this.english = english;
}
public int getSum() {
return sum;
}
public void setSum(int sum) {
this.sum = sum;
}
}
编写Map类
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
/**
 * Mapper that, per input line "name,chinese,math,english":
 * - increments pass/fail counters (threshold 60) for each of the three subjects, and
 * - emits a Student key (NullWritable value) so the shuffle sorts by total score descending.
 */
public class MapTest extends Mapper<LongWritable, Text, Student, NullWritable> {
    // Reused across map() calls to avoid allocating one key object per record.
    Student k = new Student();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String[] datas = value.toString().split(",");
        // Parse each score once instead of re-parsing for the counter and the key.
        int chinese = Integer.parseInt(datas[1]);
        int math = Integer.parseInt(datas[2]);
        int english = Integer.parseInt(datas[3]);

        if (chinese >= 60) {
            context.getCounter("等第", "语文及格").increment(1);
        } else {
            context.getCounter("等第", "语文不及格").increment(1);
        }
        if (math >= 60) {
            context.getCounter("等第", "数学及格").increment(1);
        } else {
            context.getCounter("等第", "数学不及格").increment(1);
        }
        if (english >= 60) {
            context.getCounter("等第", "英语及格").increment(1);
        } else {
            context.getCounter("等第", "英语不及格").increment(1);
        }

        k.set(datas[0], chinese, math, english);
        context.write(k, NullWritable.get());
    }
}
编写DriTest
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.File;
public class DriTest {
public static void main(String[] args) throws Exception {
File file = new File("D:\\MP\\counter\\output");
if (file.exists()) {
delFile(file);
driver();
} else {
driver();
}
}
public static void delFile(File file) {
File[] files = file.listFiles();
if (files != null && files.length != 0) {
for (int i = 0; i delFile(files[i]);
}
}
file.delete();
}
public static void driver() throws Exception {
Configuration cOnf= new Configuration();
Job job = Job.getInstance(conf);
job.setMapperClass(MapTest.class);
job.setJarByClass(DriTest.class);
//job.setReducerClass(RedTest.class);
job.setMapOutputKeyClass(Student.class);
job.setMapOutputValueClass(NullWritable.class);
// job.setOutputKeyClass(Student.class);
// job.setOutputValueClass(NullWritable.class);
FileInputFormat.setInputPaths(job, "D:\\MP\\counter\\input");
FileOutputFormat.setOutputPath(job, new Path("D:\\MP\\counter\\output"));
boolean b = job.waitForCompletion(true);
System.exit(b ? 0 : 1);
}
结果