代码示例：在 spark-shell 中读取 CSV 配置文件并转为 Map
// NOTE(review): `mutable` is imported but never used in this snippet — likely leftover; confirm before removing.
import scala.collection.mutable
// Read a two-column CSV (one "key,value" pair per line), pair column 0 with column 1,
// and collect the whole result to the driver as a Map[String, String].
// NOTE(review): collectAsMap() materializes everything on the driver — fine for a small
// config file like this, but not for large datasets.
scala> val a=spark.read.csv("/user/flink/qinghua/myconf.txt").rdd.map(x=> x.getString(0)->x.getString(1)).collectAsMap()
a: scala.collection.Map[String,String] = Map(date -> 20220103, name -> haha)
// Look up a config value by key. NOTE(review): Map.apply throws NoSuchElementException
// if the key is absent; a.get("date") returning Option[String] is the safer form.
scala> a("date")
res13: String = 20220103
CSV 文件内容示例（myconf.txt，每行两列）：
date,20200202
name,lqh
即每行都是 "key,value" 的键值对格式。