//Without SparkConf
package myPackage

import org.apache.spark.sql.SparkSession

object my1st {
  def main(args: Array[String]): Unit = {
    // Build a SparkSession, setting the master and app name directly on the builder
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("Ex")
      .getOrCreate()

    // Read the JSON file into a DataFrame, then inspect its schema and contents
    val df = spark.read.format("json").load("D:/Ex/Spark-Tutorials-master/data/people.json")
    df.printSchema()
    df.show()
  }
}
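In the version above, the master URL ("local[2]", i.e. run Spark locally with two worker threads) and the application name are set directly on the SparkSession builder. The next version collects the same settings in a SparkConf object first.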
//With SparkConf
package myPackage

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object my1st {
  def main(args: Array[String]): Unit = {
    // Collect application settings in a SparkConf and pass it to the builder
    val sparkAppConf = new SparkConf()
    sparkAppConf.set("spark.app.name", "Ex")
    sparkAppConf.set("spark.master", "local[3]")

    val spark = SparkSession.builder()
      .config(sparkAppConf)
      .getOrCreate()

    // Read the JSON file into a DataFrame, then inspect its schema and contents
    val df = spark.read.format("json").load("D:/Ex/Spark-Tutorials-master/data/people.json")
    df.printSchema()
    df.show()
  }
}
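As a side note, the SparkSession builder also accepts individual configuration keys via .config(key, value), so the same properties can be supplied without creating a SparkConf object at all. A minimal sketch of that variant follows; the object name my1stConfigKeys is made up for illustration, and the input path is reused from the examples above.

//Variant: configuration keys set directly on the builder (illustrative sketch)
package myPackage

import org.apache.spark.sql.SparkSession

// Hypothetical object name, used here only to avoid clashing with my1st above
object my1stConfigKeys {
  def main(args: Array[String]): Unit = {
    // Each .config(key, value) call sets one Spark property on the builder
    val spark = SparkSession.builder()
      .config("spark.app.name", "Ex")
      .config("spark.master", "local[3]")
      .getOrCreate()

    // Same read-and-inspect steps as in the examples above
    val df = spark.read.format("json").load("D:/Ex/Spark-Tutorials-master/data/people.json")
    df.printSchema()
    df.show()
  }
}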