Sunday, 26 July 2020

SparkConf Example using Spark with Scala

A SparkSession can be configured in two ways: by setting master and appName directly on the builder, or by collecting the settings in a SparkConf object and passing it to the builder in one call. Both variants below create the same kind of session.

//Without SparkConf

package myPackage
import org.apache.spark.sql.SparkSession

object my1st {
  def main(args: Array[String]): Unit = {
    // Configure the session inline through the builder
    val spark = SparkSession.builder()
      .master("local[2]")   // run locally with 2 threads
      .appName("Ex")
      .getOrCreate()

    // Read a JSON file into a DataFrame
    val df = spark.read.format("json").load("D:/Ex/Spark-Tutorials-master/data/people.json")
    df.printSchema()
    df.show()

    spark.stop()
  }
}
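
Individual settings can also be passed straight through the builder's config() method, which is what master() and appName() do under the hood. A minimal sketch of that variant (the shuffle-partitions key, its value "4", and the object name are illustrative, not from the original post):

//Builder config() variant
package myPackage
import org.apache.spark.sql.SparkSession

object my1stBuilderConf {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .config("spark.master", "local[2]")          // same effect as .master("local[2]")
      .config("spark.app.name", "Ex")              // same effect as .appName("Ex")
      .config("spark.sql.shuffle.partitions", "4") // illustrative extra setting
      .getOrCreate()

    spark.range(5).show() // quick sanity check that the session works
    spark.stop()
  }
}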




//With SparkConf
package myPackage
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object my1st {
  def main(args: Array[String]): Unit = {
    // Collect settings in a SparkConf object, then pass it to the builder once
    val sparkAppConf = new SparkConf()
    sparkAppConf.set("spark.app.name", "Ex")
    sparkAppConf.set("spark.master", "local[3]") // run locally with 3 threads

    val spark = SparkSession.builder()
      .config(sparkAppConf)
      .getOrCreate()

    // Read a JSON file into a DataFrame
    val df = spark.read.format("json").load("D:/Ex/Spark-Tutorials-master/data/people.json")
    df.printSchema()
    df.show()

    spark.stop()
  }
}
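
The same SparkConf can also be filled from a properties file so that environment-specific settings stay out of the code. A minimal sketch, assuming a local file named spark.conf containing lines such as spark.app.name=Ex and spark.master=local[3] (the file name, its contents, and the object name are assumptions, not from the original post):

//SparkConf loaded from a properties file
package myPackage
import java.util.Properties
import scala.io.Source
import scala.collection.JavaConverters._
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object my1stFromFile {
  def main(args: Array[String]): Unit = {
    // Load key=value pairs from the assumed spark.conf file
    val props = new Properties()
    props.load(Source.fromFile("spark.conf").bufferedReader())

    // Copy every entry into a SparkConf
    val sparkAppConf = new SparkConf()
    props.asScala.foreach { case (k, v) => sparkAppConf.set(k, v) }

    val spark = SparkSession.builder()
      .config(sparkAppConf)
      .getOrCreate()

    // Read a setting back to confirm it was applied
    println(spark.conf.get("spark.app.name"))
    spark.stop()
  }
}

Note that values set on a SparkConf in application code take precedence over the same keys passed to spark-submit or set in spark-defaults.conf.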

