代码:
package com.wenbronk.sparkstreaming.scala.commons

import java.time.Duration

import io.lettuce.core.RedisURI
import io.lettuce.core.cluster.api.StatefulRedisClusterConnection
import io.lettuce.core.cluster.{ClusterClientOptions, ClusterTopologyRefreshOptions, RedisClusterClient}

import scala.collection.immutable

/**
 * Helper for obtaining connections to a fixed Redis cluster.
 *
 * A single [[RedisClusterClient]] is created lazily and shared: the original
 * implementation built a brand-new client on every call and never shut it
 * down, leaking the client's event-loop threads each time. One client can
 * safely hand out many connections.
 */
object RedisUtils {

  // NOTE(review): cluster location is hard-coded; consider externalizing to configuration.
  val ip = "10.110.122.172"
  val ports = Array(7000, 7001, 7002, 7003, 7004, 7005)

  /** Shared, lazily-initialized cluster client configured with auto-reconnect,
   *  a connection-activation ping, and periodic + adaptive topology refresh.
   */
  private lazy val clusterClient: RedisClusterClient = {
    // One URI per known cluster node.
    val uris: immutable.Seq[RedisURI] =
      ports.map(port => RedisURI.builder().withHost(ip).withPort(port).build()).toList

    import scala.collection.JavaConverters._
    val client = RedisClusterClient.create(uris.asJava)

    // Re-read the cluster topology every 10 minutes, and also on adaptive
    // triggers (e.g. MOVED/ASK redirects), so slot migrations are picked up.
    val topologyRefreshOptions = ClusterTopologyRefreshOptions.builder
      .enablePeriodicRefresh(Duration.ofMinutes(10))
      .enableAllAdaptiveRefreshTriggers
      .build

    client.setOptions(
      ClusterClientOptions.builder
        .autoReconnect(true)
        .pingBeforeActivateConnection(true)
        .topologyRefreshOptions(topologyRefreshOptions)
        .build)

    client
  }

  /** Opens a new stateful connection to the cluster.
   *
   *  The caller is responsible for closing the returned connection
   *  (`connection.close()`); the underlying client is shared and kept alive.
   *
   *  @return a fresh String/String cluster connection
   */
  def getRediceConnect: StatefulRedisClusterConnection[String, String] =
    clusterClient.connect()
}
测试:
package com.wenbronk.sparkstreaming.test

import com.wenbronk.sparkstreaming.scala.commons.RedisUtils
import io.lettuce.core.cluster.api.StatefulRedisClusterConnection
import org.scalatest.FunSuite

/**
 * Integration tests for [[RedisUtils]].
 *
 * These require the Redis cluster at the address hard-coded in RedisUtils to
 * be reachable; they are not pure unit tests.
 */
class RedisTests extends FunSuite {

  /** Runs `body` with a fresh cluster connection and always closes it.
   *  The original tests opened one connection per test and never closed it,
   *  leaking a connection each run.
   */
  private def withConnection[A](body: StatefulRedisClusterConnection[String, String] => A): A = {
    val connection = RedisUtils.getRediceConnect
    try body(connection)
    finally connection.close()
  }

  test("redisSet") {
    withConnection { connection =>
      // Block on the async reply so the write is confirmed before the test
      // ends; the original discarded the future, silently swallowing failures.
      connection.async().set("abc", "1234").get()
      println("hello")
    }
  }

  test("redisGet") {
    withConnection { connection =>
      val value = connection.async().get("abc")
      println(value.get())
    }
  }

  test("flushdb") {
    withConnection { connection =>
      val value = connection.async().flushdb()
      println(value.get())
    }
  }
}
scala测试可见: