Author: 伊雅曦1 | Source: Internet | 2023-08-13 15:22
Given patent citation data like the following, we want to process it so that each patent is output together with all the patents that cite it. The code is as follows.
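The input is typically the NBER cite75_99.txt citation file (matching the path used in main() below), where each line pairs a citing patent with the patent it cites. The lines below are illustrative of the format only; the specific values are assumptions, not taken from the original post:

3858241,956203
3858241,3634889
3858242,1515701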
package com.hadoop.test;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * Processes the patent citation dataset: for each patent, emits all the
 * patents that cite it.
 *
 * @author root 2014-12-28
 */
public class MyJob extends Configured implements Tool {
    public static class MapClass extends MapReduceBase implements
            Mapper<Text, Text, Text, Text> {
        @Override
        public void map(Text key, Text value,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            // Input: key = citing patent, value = cited patent.
            // Invert the pair so the reducer groups citing patents
            // under the patent they cite.
            output.collect(value, key);
        }
    }
    public static class Reduce extends MapReduceBase implements
            Reducer<Text, Text, Text, Text> {
        @Override
        public void reduce(Text key, Iterator<Text> values,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            // Join all citing patents for this cited patent into a
            // comma-separated list.
            StringBuilder csv = new StringBuilder();
            while (values.hasNext()) {
                if (csv.length() > 0)
                    csv.append(",");
                csv.append(values.next().toString());
            }
            output.collect(key, new Text(csv.toString()));
        }
    }
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        JobConf job = new JobConf(conf, MyJob.class);
        // Input and output paths
        Path in = new Path(args[0]);
        Path out = new Path(args[1]);
        FileInputFormat.setInputPaths(job, in);
        FileOutputFormat.setOutputPath(job, out);
        // Set the job name
        job.setJobName("MyJob");
        job.setMapperClass(MapClass.class);
        job.setReducerClass(Reduce.class);
        // K1 and V1 are both Text
        job.setInputFormat(KeyValueTextInputFormat.class);
        // Write plain-text output
        job.setOutputFormat(TextOutputFormat.class);
        // K2 type
        job.setOutputKeyClass(Text.class);
        // V2 type
        job.setOutputValueClass(Text.class);
        // Split each input line into key and value on the first comma
        job.set("key.value.separator.in.input.line", ",");
        JobClient.runJob(job);
        return 0;
    }
    public static void main(String[] args) throws Exception {
        // Local input file path
        String inputPath = "/home/znb/test/cite75_99.txt";
        // Output path on HDFS
        String outputPath = "hdfs://znb:9000/output/MyJob/";
        String[] args1 = { inputPath, outputPath };
        int res = ToolRunner.run(new Configuration(), new MyJob(), args1);
        System.exit(res);
    }
}
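Note that main() ignores its own command-line args and always runs with the hardcoded args1 paths, so the job is typically launched straight from the IDE. If you package the class into a jar and pass the real args array to ToolRunner instead, the invocation would look something like this (the jar name is an assumption):

hadoop jar myjob.jar com.hadoop.test.MyJob /home/znb/test/cite75_99.txt hdfs://znb:9000/output/MyJob/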
The output has one line per cited patent: the patent number, then a tab, then a comma-separated list of the patents that cite it.
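As a minimal illustration (input values assumed, not taken from the original post): if the input contained the pairs 3858241,956203 and 3858245,956203, the reducer would emit

956203	3858241,3858245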