Map-typed column in a Spark DataFrame
import org.apache.spark.sql.{Row , SparkSession} import org.apache.spark.sql.types.{MapType , StringType , StructField , StructType} object SparkProject { def main (args: Array[String]): Unit = { // Set log levels org.apache.log4j.LogManager. getLogger ( "org" ).setLevel(org.apache.log4j.Level. ERROR ) org.apache.log4j.LogManager. getLogger ( "akka" ).setLevel(org.apache.log4j.Level. ERROR ) // Create a Spark session val spark = SparkSession. builder () .master( "local[1]" ) .appName( "SparkByExample" ) .getOrCreate() // Define the schema for the DataFrame val schema = StructType (Seq( StructField( "name" , StringType , true ) , StructField( "songs" , MapType(StringType , StringType , true ) , true ) )) // Create a Seq of Rows representing the data val data = Seq( Row ( "sublime" , Map( "good_song" -> "santeria" , &qu