  • Hadoop's most basic wordcount (counting word frequencies)

    package com.uniclick.dapa.dstest;
    
    import java.io.IOException;
    import java.net.URI;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    
    public class WordCount {
    	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    		// defaults, overridable from the command line
    		String inputFilePath = "/user/zhouyuanlong/wordcount/input/wordTest*.txt";
    		String outputFilePath = "/user/zhouyuanlong/wordcount/output/";
    		String queue = "default";
    		String jobName = "wordCount";
    		if(args == null || args.length < 2){
    			System.out.println("Usage: WordCount [-INPUT <inputFilePath>] [-OUTPUT <outputFilePath>] [-Q <queueName>]");
    		}else{
    			for(int i = 0; i < args.length; i++){
    				if("-INPUT".equals(args[i])){
    					inputFilePath = args[++i];
    				}else if("-OUTPUT".equals(args[i])){
    					outputFilePath = args[++i];
    				}else if("-Q".equals(args[i])){
    					queue = args[++i];
    				}
    			}
    		}
    		Configuration conf = new Configuration();
    		conf.set("mapred.job.queue.name", queue);
    		Job job = new Job(conf, jobName);
    		job.setJarByClass(WordCount.class);
    		job.setMapperClass(WordCountMapper.class);
    //		job.setCombinerClass(WordCountReducer.class); // optional combiner, see the note after the listing
    		job.setReducerClass(WordCountReducer.class);
    		job.setOutputKeyClass(Text.class);
    		job.setOutputValueClass(IntWritable.class);
    		FileInputFormat.addInputPath(job, new Path(inputFilePath));
    		Path path = new Path(outputFilePath);
    		FileSystem fs = FileSystem.get(URI.create(outputFilePath), conf);
    		if(fs.exists(path)){
    			// the job fails if the output path already exists, so delete it recursively first
    			fs.delete(path, true);
    		}
    		FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    		System.exit(job.waitForCompletion(true) ? 0 : 1); // exit 0 on success, 1 on failure
    	}
    	
    	public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable>{
    		private Text kt = new Text();
    		private final static IntWritable vt = new IntWritable(1);
    
    		@Override
    		public void map(LongWritable key, Text value, Context context)
    				throws IOException, InterruptedException {
    			// each input line holds tab-separated words; emit (word, 1) for each
    			String[] arr = value.toString().split("\t");
    			for(int i = 0; i < arr.length; i++){
    				kt.set(arr[i]);
    				context.write(kt, vt);
    			}
    		}
    	}
    	
    	public static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable>{
    		private IntWritable vt = new IntWritable();
    		
    		@Override
    		public void reduce(Text key, Iterable<IntWritable> values, Context context)
    				throws IOException, InterruptedException{
    			// sum all the 1s emitted for this word
    			int sum = 0;
    			for(IntWritable intVal : values){
    				sum += intVal.get();
    			}
    			vt.set(sum);
    			context.write(key, vt);
    		}
    	}
    	
    }
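
    The commented-out setCombinerClass line can in fact be enabled for this job: because the reduce step is a plain sum, which is associative and commutative, the reducer class itself can double as the combiner and pre-aggregate counts on the map side before the shuffle. A minimal sketch, reusing the WordCountReducer defined above:

    		// safe for wordcount: integer addition is associative and commutative,
    		// so partial sums computed on the map side do not change the final result
    		job.setCombinerClass(WordCountReducer.class);

    (On newer Hadoop releases, Job.getInstance(conf, jobName) also replaces the deprecated new Job(conf, jobName) constructor used here.)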
    


    Contents of wordTest1.txt in the input directory (words on each line are separated by the Tab key):

    hello    world
    hello    hadoop
    hello    mapreduce


    Contents of wordTest2.txt in the input directory (words on each line are separated by the Tab key):

    hello    world
    hello    hadoop
    hello    mapreduce
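
    Each map() call receives one such line as its value and splits it on the tab character, emitting a (word, 1) pair per token; for the first line that is (hello, 1) and (world, 1). A quick standalone check of the splitting logic (plain Java, no Hadoop required):

    	public class SplitCheck {
    		public static void main(String[] args) {
    			String line = "hello\tworld"; // one line of wordTest1.txt
    			for (String word : line.split("\t")) {
    				System.out.println(word + "\t1"); // mirrors what the mapper emits
    			}
    		}
    	}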

    HDFS output (MapReduce sorts reducer input by key, so the words appear in sorted order):

    hadoop  2
    hello   6
    mapreduce       2
    world   2


    PS: This is a slightly modified version of the wordcount example that ships with Hadoop.
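
    To try it, package the class into a jar and submit it with the standard hadoop jar command; the jar name here is only a placeholder:

    	hadoop jar wordcount.jar com.uniclick.dapa.dstest.WordCount -Q default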

  • Original article: https://www.cnblogs.com/snake-hand/p/3178012.html