How to write unit tests in Spark 2.0+?

后端 未结 6 431
日久生厌
日久生厌 2020-11-29 16:00

I've been trying to find a reasonable way to test SparkSession with the JUnit testing framework. While there seem to be good examples for SparkContext…

6条回答
  •  一生所求
    2020-11-29 16:30

    You can write a simple test with FunSuite and BeforeAndAfterEach like below

    // Creates a fresh local SparkSession before each test and stops it
    // afterwards, so individual tests cannot leak session state into
    // one another.
    class Tests extends FunSuite with BeforeAndAfterEach {

      var sparkSession: SparkSession = _

      // Use explicit `(): Unit =` — procedure syntax (`def f() { ... }`)
      // is deprecated in Scala 2.13 and removed in Scala 3.
      override def beforeEach(): Unit = {
        // NOTE(review): the original called .config("", ""), which registers
        // a config entry under an empty key — a placeholder with no effect
        // worth keeping, so it was dropped. Add real .config(key, value)
        // calls here as needed.
        sparkSession = SparkSession.builder()
          .appName("udf testings")
          .master("local")
          .getOrCreate()
      }

      test("your test name here") {
        // your unit test assert here like below
        assert("True".toLowerCase == "true")
      }

      // Stop the session so the next test starts from a clean builder.
      override def afterEach(): Unit = {
        sparkSession.stop()
      }
    }
    

    You don't need to create separate functions in your tests; you can simply write them as

    test ("test name") {//implementation and assert}
    

    Holden Karau has written a really nice testing library, spark-testing-base.

    You should check it out; below is a simple example:

    // Word-count test built on spark-testing-base's SharedSparkContext,
    // which provides a shared `sc` (SparkContext) across the suite.
    class TestSharedSparkContext extends FunSuite with SharedSparkContext {

      // Expected (word, count) pairs for the sample sentence below.
      val expectedResult = List(("a", 3), ("b", 2), ("c", 4))

      test("Word counts should be equal to expected") {
        verifyWordCount(Seq("c a a b a c b c c"))
      }

      // Runs WordCount over `seq` and checks the collected output
      // against `expectedResult`.
      def verifyWordCount(seq: Seq[String]): Unit = {
        val counted = new WordCount().transform(sc.makeRDD(seq))
        val actual = counted.collect().toList
        assertResult(expectedResult)(actual)
      }
    }
    

    Hope this helps!

提交回复
热议问题