Overview
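The program below shows the classic map-side aggregation pattern. Each map task scans its input split and keeps a running maximum in an instance field instead of emitting every number; only in cleanup(), which Hadoop calls once after all map() invocations of a task have finished, does the task write out that single local maximum. The lone reduce task then sees just one candidate per map task, tracks the largest in the same way, and writes the global maximum in its own cleanup(). As a result, only a handful of values cross the network rather than all one million.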
// Find the maximum value among one million numbers with MapReduce.
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TopKApp {
    static final String INPUT_PATH = "hdfs://chaoren:9000/input";
    static final String OUT_PATH = "hdfs://chaoren:9000/out";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        final FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), conf);
        final Path outPath = new Path(OUT_PATH);
        // Delete the output directory if it already exists, so the job can be rerun.
        if (fileSystem.exists(outPath)) {
            fileSystem.delete(outPath, true);
        }

        final Job job = new Job(conf, TopKApp.class.getSimpleName());
        // Specify where the input files live.
        FileInputFormat.setInputPaths(job, INPUT_PATH);
        job.setMapperClass(MyMapper.class);
        // Specify the custom reducer class.
        job.setReducerClass(MyReducer.class);
        // Specify the reducer's output types.
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(NullWritable.class);
        // Specify where the output is written.
        FileOutputFormat.setOutputPath(job, outPath);
        // Submit the job to the JobTracker and wait for it to finish.
        job.waitForCompletion(true);
    }

    static class MyMapper extends Mapper<LongWritable, Text, LongWritable, NullWritable> {
        long max = Long.MIN_VALUE;

        @Override
        protected void map(LongWritable k1, Text v1, Context context)
                throws IOException, InterruptedException {
            // Parse the number on this line and keep the running maximum.
            final long temp = Long.parseLong(v1.toString());
            if (temp > max) {
                max = temp;
            }
        }

        // Runs once after all map() calls of this task have finished.
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            context.write(new LongWritable(max), NullWritable.get());
        }
    }

    static class MyReducer extends Reducer<LongWritable, NullWritable, LongWritable, NullWritable> {
        long max = Long.MIN_VALUE;

        @Override
        protected void reduce(LongWritable k2, Iterable<NullWritable> v2s, Context context)
                throws IOException, InterruptedException {
            // Each incoming key is one mapper's local maximum; track the largest of them.
            final long temp = k2.get();
            if (temp > max) {
                max = temp;
            }
        }

        // Runs once after all reduce() calls of this task have finished.
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            context.write(new LongWritable(max), NullWritable.get());
        }
    }
}
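To try the job end to end, you need an input file under /input with one number per line. Below is a minimal sketch of a test-data generator, assuming locally created input; the class name GenerateInput, the file name numbers.txt, and the value range are illustrative choices, not part of the original post.

import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;

// Hypothetical helper: writes one million random non-negative longs,
// one per line, to a local file that can then be copied into HDFS.
public class GenerateInput {
    public static void main(String[] args) throws IOException {
        Random random = new Random();
        try (FileWriter writer = new FileWriter("numbers.txt")) {
            for (int i = 0; i < 1_000_000; i++) {
                // Keep values non-negative so every line parses cleanly with Long.parseLong.
                writer.write(Long.toString(Math.abs(random.nextLong() % 1_000_000_000L)));
                writer.write('\n');
            }
        }
    }
}

Copy the file into HDFS with hadoop fs -put numbers.txt /input (matching INPUT_PATH above), then run the packaged job with hadoop jar; the single maximum value ends up in the /out directory, typically in part-r-00000.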
Conclusion
That covers the complete example of finding the maximum value among one million numbers with MapReduce; hopefully it helps you solve similar development problems.