  • Spark: read a file from HDFS, run WordCount, and write the result back to HDFS

    package iie.udps.example.operator.spark;
    
    import scala.Tuple2;
    
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.FlatMapFunction;
    import org.apache.spark.api.java.function.Function;
    import org.apache.spark.api.java.function.Function2;
    import org.apache.spark.api.java.function.PairFunction;
    import java.util.Arrays;
    import java.util.regex.Pattern;
    
    /**
     * WordCount example: read a file from HDFS with the Spark framework and
     * write the word counts back to HDFS.
     * 
     * Submit command: spark-submit --class
     * iie.udps.example.operator.spark.TextFileSparkTest --master yarn-cluster
     * /tmp/sparkTest.jar hdfs://192.168.8.101/test/words
     * hdfs://192.168.8.101/test/spark/out
     * 
     * @author xiaodongfang
     *
     */
    public final class TextFileSparkTest {
    	private static final Pattern SPACE = Pattern.compile(" ");
    
    	@SuppressWarnings("serial")
    	public static void main(String[] args) throws Exception {
    
    		if (args.length < 2) {
    			System.err.println("Usage: JavaWordCount <file>");
    			System.exit(1);
    		}
    		String inputSparkFile = args[0];
    		String outputSparkFile = args[1];
    
    		SparkConf sparkConf = new SparkConf().setAppName("SparkWordCount");
    		JavaSparkContext ctx = new JavaSparkContext(sparkConf);
    		// Read the input file from HDFS as an RDD of lines.
    		JavaRDD<String> lines = ctx.textFile(inputSparkFile, 1);
    		// Split each line into words on single spaces.
    		JavaRDD<String> words = lines
    				.flatMap(new FlatMapFunction<String, String>() {
    					@Override
    					public Iterable<String> call(String s) {
    						return Arrays.asList(SPACE.split(s));
    					}
    				});
    
    		// Map each word to a (word, 1) pair.
    		JavaPairRDD<String, Integer> ones = words
    				.mapToPair(new PairFunction<String, String, Integer>() {
    
    					@Override
    					public Tuple2<String, Integer> call(String s) {
    						return new Tuple2<String, Integer>(s, 1);
    					}
    				});
    		
    
    		// Sum the counts for each distinct word.
    		JavaPairRDD<String, Integer> counts = ones
    				.reduceByKey(new Function2<Integer, Integer, Integer>() {
    					@Override
    					public Integer call(Integer i1, Integer i2) {
    						return i1 + i2;
    					}
    				});
    
    		// Format each (word, count) pair as "WORD: count" and write the result back to HDFS.
    		counts.map(new Function<Tuple2<String, Integer>, String>() {
    			@Override
    			public String call(Tuple2<String, Integer> arg0) throws Exception {
    				return arg0._1.toUpperCase() + ": " + arg0._2;
    			}
    		}).saveAsTextFile(outputSparkFile);
    
    		ctx.stop();
    	}
    }
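
    For comparison, the same job can be expressed much more compactly with Java 8 lambdas. The sketch below is a minimal, hypothetical variant (the class name LambdaWordCount is not from the original post) and assumes a Spark 2.x dependency, where FlatMapFunction returns an Iterator rather than an Iterable; the logic is otherwise identical to the class above, and the spark-submit command shown in the Javadoc applies with --class pointed at this class instead.

    package iie.udps.example.operator.spark;
    
    import java.util.Arrays;
    
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    
    import scala.Tuple2;
    
    /**
     * Hypothetical lambda-based variant of TextFileSparkTest (assumes Spark 2.x).
     */
    public final class LambdaWordCount {
    	public static void main(String[] args) {
    		if (args.length < 2) {
    			System.err.println("Usage: LambdaWordCount <input-file> <output-dir>");
    			System.exit(1);
    		}
    
    		SparkConf conf = new SparkConf().setAppName("SparkWordCountLambda");
    		JavaSparkContext ctx = new JavaSparkContext(conf);
    
    		ctx.textFile(args[0])
    				// In Spark 2.x, flatMap expects an Iterator, not an Iterable
    				.flatMap(line -> Arrays.asList(line.split(" ")).iterator())
    				.mapToPair(word -> new Tuple2<>(word, 1))
    				.reduceByKey((a, b) -> a + b)
    				.map(t -> t._1.toUpperCase() + ": " + t._2)
    				.saveAsTextFile(args[1]);
    
    		ctx.stop();
    	}
    }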
    

  • Original post: https://www.cnblogs.com/xiaodf/p/5027178.html