For Scala, if you use sbt, this will also work.
In your build.sbt file:
libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-core" % "1.6.2",
"org.apache.spark" %% "spark-sql" % "1.6.2",
"org.apache.spark" %% "spark-mllib" % "1.6.2",
"mysql" % "mysql-connector-java" % "5.1.12"
)
Then you just need to load the driver class and read through the JDBC data source:
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Register the MySQL JDBC driver
Class.forName("com.mysql.jdbc.Driver").newInstance

val conf = new SparkConf().setAppName("MY_APP_NAME").setMaster("MASTER")
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)

// Replace the <...> placeholders with your host, database and credentials.
// "dbtable" takes either a table name or a parenthesized subquery with an alias.
val data = sqlContext.read
  .format("jdbc")
  .option("url", "jdbc:mysql://<HOST>:3306/<DATABASE>")
  .option("user", "<USERNAME>")
  .option("password", "<PASSWORD>")
  .option("dbtable", "MYSQL_QUERY")
  .load()
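After load() you get an ordinary DataFrame, so the usual API applies. A minimal sketch of what you might do next (the temp table name my_table and the query are just illustrative assumptions):

// Inspect the schema Spark inferred from the MySQL table
data.printSchema()

// Register it for SQL queries (Spark 1.6 API) and preview a few rows
data.registerTempTable("my_table")
sqlContext.sql("SELECT * FROM my_table LIMIT 10").show()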