  • Hadoop's most basic wordcount (word-frequency counting)

    package com.uniclick.dapa.dstest;
    
    import java.io.IOException;
    import java.net.URI;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    
    public class WordCount {
    	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    		String inputFilePath = "/user/zhouyuanlong/wordcount/input/wordTest*.txt";
    		String outputFilePath = "/user/zhouyuanlong/wordcount/output/";
    		String queue = "default";
    		String jobName = "wordCount";
    		if(args == null || args.length < 2){
    			// No arguments given: print usage and fall back to the hard-coded defaults above.
    			System.out.println("Usage: [-INPUT <inputFilePath>] [-OUTPUT <outputFilePath>] [-Q <queueName>]");
    		}else{
    			for(int i = 0; i < args.length; i++){
    				if("-INPUT".equals(args[i])){
    					inputFilePath = args[++i];
    				}else if("-OUTPUT".equals(args[i])){
    					outputFilePath = args[++i];
    				}else if("-Q".equals(args[i])){
    					queue = args[++i];
    				}
    			}
    		}
    		Configuration conf = new Configuration();
    		// Hadoop 1.x property name; on Hadoop 2.x+ the equivalent is mapreduce.job.queuename.
    		conf.set("mapred.job.queue.name", queue);
    		Job job = new Job(conf, jobName);
    		job.setJarByClass(WordCount.class);
    		job.setMapperClass(WordCountMapper.class);
    //		job.setCombinerClass(WordCountReducer.class); // optional combiner; see the note after the listing
    		job.setReducerClass(WordCountReducer.class);
    		job.setOutputKeyClass(Text.class);
    		job.setOutputValueClass(IntWritable.class);
    		FileInputFormat.addInputPath(job, new Path(inputFilePath));
    		// MapReduce refuses to start if the output directory already exists,
    		// so remove any previous run's output first (true = recursive delete).
    		Path path = new Path(outputFilePath);
    		FileSystem fs = FileSystem.get(URI.create(outputFilePath), conf);
    		if(fs.exists(path)){
    			fs.delete(path, true);
    		}
    		FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    		// Exit with 0 on success, non-zero on failure.
    		System.exit(job.waitForCompletion(true) ? 0 : 1);
    	}
    	
    	public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable>{
    		private Text kt = new Text();                              // reused output key
    		private final static IntWritable vt = new IntWritable(1); // constant count of 1
    
    		@Override
    		public void map(LongWritable key, Text value, Context context)
    				throws IOException, InterruptedException {
    			// Each input line is tab-delimited; emit (word, 1) for every field.
    			String[] arr = value.toString().split("\t");
    			for(int i = 0; i < arr.length; i++){
    				kt.set(arr[i]);
    				context.write(kt, vt);
    			}
    		}
    	}
    	
    	public static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable>{
    		private IntWritable vt = new IntWritable();
    		
    		@Override
    		public void reduce(Text key, Iterable<IntWritable> values, Context context) 
    				throws IOException, InterruptedException{
    			// Sum all the 1s emitted for this word.
    			int sum = 0;
    			for(IntWritable intVal : values){
    				sum += intVal.get();
    			}
    			vt.set(sum);
    			context.write(key, vt);
    		}
    	}
    	
    }
    

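    The setCombinerClass call commented out in the listing is worth a note: because this reduce step just sums integers, which is associative and commutative, the same WordCountReducer can double as a combiner and pre-aggregate each map task's output before the shuffle. A minimal sketch of that one-line change, assuming the job object from main above:

    		// Safe here: combining partial sums gives the same result as reducing
    		// everything at once, and the combiner's (Text, IntWritable) input and
    		// output types match the map output types.
    		job.setCombinerClass(WordCountReducer.class);
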

    Contents of wordTest1.txt in the input directory (fields on each line are separated by a tab):

    hello    world
    hello    hadoop
    hello    mapredruce


    Contents of wordTest2.txt in the input directory (fields on each line are separated by a tab):

    hello    world
    hello    hadoop
    hello    mapredruce

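    Before comparing with the job's actual output, here is a plain-Java sanity check (no Hadoop involved, just a HashMap; the lines are hard-coded copies of the two sample files above) showing what the counts for these two files alone would be:

    	import java.util.HashMap;
    	import java.util.Map;

    	public class LocalWordCount {
    		public static void main(String[] args) {
    			// Hard-coded contents of wordTest1.txt and wordTest2.txt.
    			String[] lines = {
    				"hello\tworld", "hello\thadoop", "hello\tmapredruce",
    				"hello\tworld", "hello\thadoop", "hello\tmapredruce"
    			};
    			Map<String, Integer> counts = new HashMap<String, Integer>();
    			for (String line : lines) {
    				for (String word : line.split("\t")) {
    					Integer c = counts.get(word);
    					counts.put(word, c == null ? 1 : c + 1);
    				}
    			}
    			// Prints hello=6, world=2, hadoop=2, mapredruce=2 (order varies).
    			System.out.println(counts);
    		}
    	}
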
    HDFS output (the wordTest*.txt input glob matched more files than the two shown above, which is where the extra words and counts come from):

    web     2
    mapredruce      1
    python  1
    hadoop  1
    hello   6
    clojure 2
    world   1
    java    2


    PS:对Hadoop自带的wordcount的例子略有改变
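
    For comparison, the bundled example parses its command line with Hadoop's GenericOptionsParser, which already understands -D key=value pairs, so the queue can be set with -D mapred.job.queue.name=... and a hand-rolled -Q flag becomes unnecessary. A sketch of how main would start in that style (same-era API; the usage string is illustrative):

    	// GenericOptionsParser consumes generic options (-D, -fs, -jt, ...)
    	// into conf and returns whatever is left, e.g. the input/output paths.
    	Configuration conf = new Configuration();
    	String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    	if (otherArgs.length != 2) {
    		System.err.println("Usage: wordcount <in> <out>");
    		System.exit(2);
    	}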

  • Original post: https://www.cnblogs.com/snake-hand/p/3178012.html