1:下载 mysql-connector，放入 jars 目录下
2:在spark-env.sh中 配置EXTRA_SPARK_CLASSPATH环境变量
# spark-env.sh entry: point the classpath variable at the Spark jars directory,
# where the MySQL connector jar was copied (see the note above this line).
# NOTE(review): Spark itself documents SPARK_CLASSPATH (deprecated) and
# spark.driver.extraClassPath / spark.executor.extraClassPath;
# EXTRA_SPARK_CLASSPATH is not a standard Spark env var — confirm something
# actually consumes it, otherwise the jar is only picked up because it sits
# in jars/ already.
export EXTRA_SPARK_CLASSPATH=/home/fly/spark-2.1.1-bin-hadoop2.7/jars/
# Example 1: JDBC read where the credentials and SSL flag travel in the URL
# query string, and the table name is database-qualified ("db.tablename").
# NOTE(review): the URL has no '/' between the port and '?'; MySQL
# Connector/J documents jdbc:mysql://host[:port]/[db]?props — confirm this
# form actually connects on the driver version in jars/.
df = (
    sqlContext.read.format("jdbc")
    .option("url", "jdbc:mysql://localhost:3306?user=root&useSSL=false")
    .option("dbtable", "db.tablename")
    .load()
)

# Example 2: the same kind of JDBC read, but with the database in the URL
# path and the driver class / credentials passed as separate reader options
# (password is the empty string here).
db203 = (
    sqlContext.read.format("jdbc")
    .options(
        url="jdbc:mysql://localhost/db?useSSL=false",
        driver="com.mysql.jdbc.Driver",
        dbtable="user",
        user="root",
        password="",
    )
    .load()
)

# Print the first rows of the Example-1 DataFrame to the console.
df.show()