I'm trying to run this code:
import pyspark
from pyspark.sql import SparkSession
spark = SparkSession.builder \
.master("local") \
.app
The environment variables set in .bash_profile or /etc/profile may not be visible to your code; set them directly in your code instead.
# Configure the Spark environment before importing pyspark.
# The Python interpreter may not inherit variables set in shell profiles
# (.bash_profile, /etc/profile), so they are set explicitly here.
import os
import sys

# Point SPARK_HOME at the Cloudera SPARK2 parcel installation.
os.environ['SPARK_HOME'] = "/opt/cloudera/parcels/SPARK2/lib/spark2"
# Arguments passed to spark-submit when the pyspark shell context starts:
# run against the YARN resource manager.
os.environ['PYSPARK_SUBMIT_ARGS'] = "--master yarn pyspark-shell"

# Make Spark's bundled pyspark package and py4j bridge importable.
# NOTE(review): the py4j version (0.10.6) is tied to the Spark release —
# confirm it matches the zip actually shipped under $SPARK_HOME/python/lib.
sys.path.append(os.path.join(os.environ['SPARK_HOME'], "python"))
sys.path.append(os.path.join(os.environ['SPARK_HOME'], "python/lib/py4j-0.10.6-src.zip"))

# Verify the Spark modules can now be imported; exit with a non-zero
# status if the environment is still misconfigured.
try:
    from pyspark import SparkContext
    from pyspark.sql import SparkSession
    from pyspark import SparkConf
    print("success")
except ImportError as e:
    print("error importing spark modules", e)
    sys.exit(1)