package spark.example.sql

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{StringType, StructField, StructType}
object Example1 {
  def main(args: Array[String]): Unit = {
    // Build a local SparkSession for this example.
    val spark = SparkSession
      .builder()
      .appName("Spark SQL basic example")
      .master("local")
      .config("spark.some.config.option", "some-value")
      .getOrCreate()
    // Enables implicit conversions such as the $"colName" syntax.
    import spark.implicits._
    val file = "file:///C:\\github\\data\\sparkdata.txt"

    // Explicit schema for the three comma-separated columns in the file.
    val schema = StructType(
      List(
        StructField("name", StringType, nullable = true),
        StructField("age", StringType, nullable = true),
        StructField("email", StringType, nullable = true)))
    // Read the file without a header; columns arrive as _c0, _c1, _c2 ...
    val df = spark.read.csv(file)
    // ... then re-wrap the underlying RDD[Row] with the named schema.
    val df2 = spark.createDataFrame(df.rdd, schema)

    df2.select("name", "age").show()
    println("#len:" + df2.count())

    // Register the DataFrame as a temporary view and query it with SQL.
    df2.createOrReplaceTempView("people")
    val sql = spark.sql("SELECT name FROM people")
    sql.show()
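
    // Alternative sketch (not in the original post): the same StructType can be
    // applied at read time via DataFrameReader.schema, which avoids the extra
    // createDataFrame(df.rdd, schema) round-trip. The name dfWithSchema is
    // illustrative only.
    val dfWithSchema = spark.read
      .schema(schema)
      .csv(file)
    dfWithSchema.select("name", "age").show()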
  }
}
Source: oschina
Link: https://my.oschina.net/u/778683/blog/4272214