zoukankan      html  css  js  c++  java
  • es-hadoop saveToEsWithMeta

      @Test
      def testEsRDDWriteWithDynamicMapping(): Unit = {
        // NOTE(review): "three" maps to a Tuple3, not a collection — presumably
        // Seq(".", "..", "...") was intended; confirm against the upstream test.
        val doc1 = Map("one" -> null, "two" -> Set("2"), "three" -> (".", "..", "..."), "number" -> 1)
        val doc2 = Map("OTP" -> "Otopeni", "SFO" -> "San Fran", "number" -> 2)
    
        val target = wrapIndex("spark-test/scala-dyn-id-write")
    
        // Build the (id, document) pair RDD first, then index it — mirroring
        // testEsRDDWriteWithDynamicMapMapping. The original assigned the Unit
        // result of saveToEsWithMeta to `pairRDD`, which was misleading.
        val pairRDD = sc.makeRDD(Seq((3, doc1), (4, doc2)))
        pairRDD.saveToEsWithMeta(target, cfg)
    
        // Each pair's key becomes the document id in the target index.
        assertEquals(2, EsSpark.esRDD(sc, target).count())
        assertTrue(RestUtils.exists(target + "/3"))
        assertTrue(RestUtils.exists(target + "/4"))
    
        assertThat(RestUtils.get(target + "/_search?"), containsString("SFO"))
      }
    
      @Test
      def testEsRDDWriteWithDynamicMapMapping() {
        // Two documents whose ids and TTLs come from per-record metadata maps.
        val firstDoc = Map("one" -> null, "two" -> Set("2"), "three" -> (".", "..", "..."), "number" -> 1)
        val secondDoc = Map("OTP" -> "Otopeni", "SFO" -> "San Fran", "number" -> 2)
    
        val target = wrapIndex("spark-test/scala-dyn-id-write")
    
        // Per-document metadata: document id, TTL, and (second doc only) a version.
        val firstMeta = Map(ID -> 5, TTL -> "1d")
        val secondMeta = Map(ID -> 6, TTL -> "2d", VERSION -> "23")
    
        // Sanity-check the metadata maps before indexing.
        assertEquals(5, firstMeta.getOrElse(ID, null))
        assertEquals(6, secondMeta.getOrElse(ID, null))
    
        // (metadata, document) pairs; the connector reads the id from the ID key.
        val metaPairs = sc.makeRDD(Seq((firstMeta, firstDoc), (secondMeta, secondDoc)))
    
        metaPairs.saveToEsWithMeta(target, cfg)
    
        // The ids supplied via metadata must now be resolvable in the index.
        assertTrue(RestUtils.exists(s"$target/5"))
        assertTrue(RestUtils.exists(s"$target/6"))
    
        assertThat(RestUtils.get(s"$target/_search?"), containsString("SFO"))
      }

     spark-2.0.0-bin-hadoop2.6/bin/spark-shell --jars elasticsearch-hadoop-5.0.1/dist/elasticsearch-spark-20_2.11-5.0.1.jar

    注意:Spark、Scala 与 elasticsearch-hadoop 三者的版本必须相互对应,否则会出现兼容性错误!

    // spark-shell session: index plain documents and (id, document) pairs into
    // Elasticsearch via the elasticsearch-spark connector's implicit RDD methods.
    import org.apache.spark.SparkConf
    import org.elasticsearch.spark._
    
    // NOTE(review): SparkContext.getConf returns a copy of the configuration, so
    // these set() calls presumably do NOT reconfigure the already-running shell
    // context; pass --master/--conf on the spark-shell command line — confirm.
    sc.getConf.setMaster("local").setAppName("RDDTest").set("es.nodes", "127.0.0.1").set("es.index.auto.create", "true");
    val numbers = Map("one" -> 1, "two" -> 2, "three" -> 3)
    val airports = Map("OTP" -> "Otopeni", "SFO" -> "San Fran")
    val r=sc.makeRDD(Seq(numbers, airports))
    // Each Map becomes one document in "spark/data" with an auto-generated id.
    r.saveToEs("spark/data")
    
    val doc1 = Map("one" -> null, "two" -> Set("2"), "three" -> (".", "..", "..."), "number" -> 1)
    val doc2 = Map("OTP" -> "Otopeni", "SFO" -> "San Fran", "number" -> 2)
    val pairRDD = sc.makeRDD(Seq((3, doc1), (4, doc2)))
    // For a pair RDD, the key (3, 4) is used as the document id (see note below).
    pairRDD.saveToEsWithMeta("data/test")

    可以看到ES请求data/test/3中id为3的文档,data/test/4中id为4的文档!

  • 相关阅读:
    被initramfs的一个要求搞死
    【转】网卡驱动注册到PCI总线这一过程的分析
    PCI总线
    diff和patch使用指南
    Windows 平台下构建Android 开发环境
    linux 如何使用大于2T的块设备
    PCI设备驱动 三
    CFI Flash, JEDEC Flash ,Parellel Flash, SPI Flash, Nand Flash,Nor Flash的区别和联系
    使用initramfs启动Linux成功
    glibc 各个库作用介绍
  • 原文地址:https://www.cnblogs.com/bonelee/p/6099236.html
Copyright © 2011-2022 走看看