  • Writing to HBase 1.x/2.x from Spark 2.x
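    The writer below assumes the target table already exists with an info column family
    (for example via create 'student','info' in the hbase shell). As an alternative, here is a
    minimal sketch using the HBase Admin API; the object name CreateStudentTable is only for
    illustration, and connection settings are assumed to come from hbase-site.xml on the
    classpath.

    import org.apache.hadoop.hbase.client.ConnectionFactory
    import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}


    object CreateStudentTable {

      def main(args: Array[String]): Unit = {

        // Connection settings (ZooKeeper quorum etc.) are read from hbase-site.xml.
        val conf = HBaseConfiguration.create()
        val connection = ConnectionFactory.createConnection(conf)
        val admin = connection.getAdmin

        val table = TableName.valueOf("student")
        if (!admin.tableExists(table)) {
          // HTableDescriptor/HColumnDescriptor are deprecated in HBase 2.x but still
          // available, so the same code runs against both 1.x and 2.x.
          val descriptor = new HTableDescriptor(table)
          descriptor.addFamily(new HColumnDescriptor("info"))
          admin.createTable(descriptor)
        }

        admin.close()
        connection.close()
      }
    }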

    import org.apache.hadoop.hbase.client.Put
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable
    import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
    import org.apache.hadoop.hbase.util.Bytes
    import org.apache.hadoop.mapreduce.Job
    import org.apache.spark.{SparkConf, SparkContext}
    
    
    /**
      * Writes sample rows into an HBase table from Spark via TableOutputFormat.
      */
    object SparkWriteHbase {
    
      def main(args: Array[String]): Unit = {
    
        val conf = new SparkConf().setAppName("SparkWriteHBase").setMaster("local")
        val sc = new SparkContext(conf)
        val tableName = "student"

        // Tell TableOutputFormat which table to write to. HBase connection settings
        // (e.g. hbase.zookeeper.quorum) are picked up from hbase-site.xml on the classpath.
        sc.hadoopConfiguration.set(TableOutputFormat.OUTPUT_TABLE, tableName)
    
        // Job.getInstance replaces the deprecated new Job(...) constructor.
        val job = Job.getInstance(sc.hadoopConfiguration)

        job.setOutputKeyClass(classOf[ImmutableBytesWritable])
        // The values written through TableOutputFormat are Put mutations, not Results.
        job.setOutputValueClass(classOf[Put])
        job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
    
    
    
        // Two sample rows in the form: rowkey,name,gender,age
        val inDataRDD = sc.makeRDD(Array("3,Rongcheng,M,26", "4,Guanhua,M,27"))

        // Turn each CSV line into the (ImmutableBytesWritable, Put) pair expected by
        // TableOutputFormat; the key is ignored on write, only the Put matters.
        val rdd = inDataRDD.map(_.split(",")).map(arr => {
          val put = new Put(Bytes.toBytes(arr(0)))  // arr(0) is the row key
          put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(arr(1)))
          put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("gender"), Bytes.toBytes(arr(2)))
          put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes(arr(3)))
          (new ImmutableBytesWritable(), put)
        })
    
        // Write the RDD to HBase through the new-API Hadoop output path.
        rdd.saveAsNewAPIHadoopDataset(job.getConfiguration)

        sc.stop()
      }
    
    }
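    To check that the rows actually landed in HBase, here is a minimal read-back sketch using
    TableInputFormat and newAPIHadoopRDD; the object name SparkReadHbase is only for
    illustration, and it reads the same student table and info column family written above.

    import org.apache.hadoop.hbase.HBaseConfiguration
    import org.apache.hadoop.hbase.client.Result
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable
    import org.apache.hadoop.hbase.mapreduce.TableInputFormat
    import org.apache.hadoop.hbase.util.Bytes
    import org.apache.spark.{SparkConf, SparkContext}


    object SparkReadHbase {

      def main(args: Array[String]): Unit = {

        val sparkConf = new SparkConf().setAppName("SparkReadHBase").setMaster("local")
        val sc = new SparkContext(sparkConf)

        // Point TableInputFormat at the table written by SparkWriteHbase; connection
        // settings again come from hbase-site.xml on the classpath.
        val hbaseConf = HBaseConfiguration.create()
        hbaseConf.set(TableInputFormat.INPUT_TABLE, "student")

        // Each record is a (row key, Result) pair.
        val hbaseRDD = sc.newAPIHadoopRDD(hbaseConf,
          classOf[TableInputFormat],
          classOf[ImmutableBytesWritable],
          classOf[Result])

        hbaseRDD.collect().foreach { case (_, result) =>
          val key    = Bytes.toString(result.getRow)
          val name   = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("name")))
          val gender = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("gender")))
          val age    = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("age")))
          println(s"row=$key name=$name gender=$gender age=$age")
        }

        sc.stop()
      }
    }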
  • Original post: https://www.cnblogs.com/zxbdboke/p/12749534.html