zoukankan      html  css  js  c++  java
  • 周总结四

    周总结四

    核心代码

    package com.mapr;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import java.io.IOException;
    
    public class WordCountDriver {
        /** Default HDFS input file, used when no CLI arguments are supplied. */
        private static final String DEFAULT_INPUT =
                "hdfs://192.168.132.128:9000/mymapreduce1/in/MapReduceTry.txt";
        /** Default HDFS output directory, used when no CLI arguments are supplied. */
        private static final String DEFAULT_OUTPUT =
                "hdfs://192.168.132.128:9000/mymapreduce1/out";

        /**
         * Configures and submits the word-count MapReduce job.
         *
         * @param args optional: args[0] = input path, args[1] = output path;
         *             the original hard-coded HDFS paths remain the defaults.
         * @throws IOException            on job setup / HDFS errors
         * @throws ClassNotFoundException if a job class cannot be resolved
         * @throws InterruptedException   if the submitting thread is interrupted
         */
        public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
            // Allow paths to be overridden from the command line while keeping
            // the original hard-coded defaults for backward compatibility.
            String input = args.length > 0 ? args[0] : DEFAULT_INPUT;
            String output = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

            // 1 Build the configuration and wrap it in a job
            Configuration configuration = new Configuration();
            Job job = Job.getInstance(configuration);
            // 2 Jar containing the job classes
            job.setJarByClass(WordCountDriver.class);
            // 3 Mapper and reducer classes
            job.setMapperClass(WordCountMapper.class);
            job.setReducerClass(WordCountReducer.class);
            // 4 Map output types
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            // 5 Final (reduce) output types
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            // 6 Input and output paths
            FileInputFormat.setInputPaths(job, new Path(input));
            FileOutputFormat.setOutputPath(job, new Path(output));
            // 7 Submit and block; propagate success/failure as the exit code.
            // (Original ignored waitForCompletion's boolean, so failures exited 0.)
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
    
    package com.mapr;
    
    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapreduce.Mapper;
    import java.io.IOException;
    
    public class WordCountMapper extends Mapper<LongWritable,Text,Text,IntWritable>{
        Text k=new Text();
        IntWritable v=new IntWritable(1);
        protected void map(LongWritable key,Text value,Context context)throws IOException,InterruptedException{
            String line=value.toString();
            String[] words=line.split(" ");
            for(String word:words){
                k.set(word);
                context.write(k,v);
            }
        }
    }
    package com.mapr;
    
    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapreduce.Reducer;
    import java.io.IOException;
    
    public class WordCountReducer extends Reducer<Text,IntWritable,Text,IntWritable>{
        int sum;
        IntWritable v=new IntWritable();
        protected void reduce(Text key, Iterable<IntWritable> values,Context context)throws IOException,InterruptedException{
            sum=0;
            for(IntWritable count:values){
                sum+=count.get();
            }
            v.set(sum);
            context.write(key,v);
        }
    }
    

    MapReduce实现单词统计(WordCount)。
    平均每天学习时间:2小时
    代码 700行

  • 相关阅读:
    《Kubernetes权威指南第2版》学习(四)kubernetes基本概念和术语
    《Kubernetes权威指南第2版》学习(三)RC学习
    HTTP 1.1 的HOST 与 虚拟IP(待续)
    HTTP 2 VS HTTP 1.1
    HTTP 的若干问题
    HTTP 协议入门(转载)
    java web 基础 json 和 javaBean转化
    java web基础学习 Forward和Redirect区别
    【雅思】金山词霸-单词学习(41-80)
    【数据库】left join(左关联)、right join(右关联)、inner join(自关联)的区别
  • 原文地址:https://www.cnblogs.com/2506236179zhw/p/14226688.html
Copyright © 2011-2022 走看看